26 changes: 13 additions & 13 deletions airflow/contrib/hooks/gcp_function_hook.py
@@ -35,7 +35,7 @@
# noinspection PyAbstractClass
class GcfHook(GoogleCloudBaseHook):
"""
Hook for Google Cloud Functions APIs.
Hook for the Google Cloud Functions APIs.
"""
_conn = None

@@ -48,7 +48,7 @@ def __init__(self,

def get_conn(self):
"""
Retrieves connection to cloud functions.
Retrieves the connection to Cloud Functions.

:return: Google Cloud Function services object
:rtype: dict
@@ -61,7 +61,7 @@ def get_conn(self):

def get_function(self, name):
"""
Returns the function with a given name.
Returns the Cloud Function with the given name.

:param name: name of the function
:type name: str
@@ -73,9 +73,9 @@

def list_functions(self, full_location):
"""
Lists all functions created in the location.
Lists all Cloud Functions created in the location.

:param full_location: full location including project. On the form
:param full_location: full location including the project in the form of
of /projects/<PROJECT>/location/<LOCATION>
:type full_location: str
:return: array of CloudFunction objects - representing functions in the location
@@ -87,12 +87,12 @@ def list_functions(self, full_location):

def create_new_function(self, full_location, body):
"""
Creates new cloud function in location given with body specified.
Creates a new function in Cloud Functions in the location specified in the body.

:param full_location: full location including project. On the form
:param full_location: full location including the project in the form of
of /projects/<PROJECT>/location/<LOCATION>
:type full_location: str
:param body: body required by the cloud function insert API
:param body: body required by the Cloud Functions insert API
:type body: dict
:return: response returned by the operation
:rtype: dict
@@ -106,7 +106,7 @@ def create_new_function(self, full_location, body):

def update_function(self, name, body, update_mask):
"""
Updates cloud function according to the update mask specified.
Updates Cloud Functions according to the specified update mask.

:param name: name of the function
:type name: str
@@ -129,10 +129,10 @@ def upload_function_zip(self, parent, zip_path):
"""
Uploads zip file with sources.

:param parent: project and location in which signed upload URL should be generated
in the form of /projects/<PROJECT>/location/<LOCATION>
:param parent: Google Cloud Platform project id and region where zip file should
be uploaded in the form of /projects/<PROJECT>/location/<LOCATION>
:type parent: str
:param zip_path: path of the file to upload (should point to valid .zip file)
:param zip_path: path of the valid .zip file to upload
:type zip_path: str
:return: Upload URL that was returned by generateUploadUrl method
"""
@@ -156,7 +156,7 @@ def upload_function_zip(self, parent, zip_path):

def delete_function(self, name):
"""
Deletes cloud function specified by name.
Deletes the specified Cloud Function.

:param name: name of the function
:type name: str
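
A minimal, hypothetical sketch of how the hook methods documented above could be chained together from custom operator code follows. All identifiers (connection ID, project, location, zip path and function name) are placeholders, not part of this change; the location string follows the format described in the docstrings.

from airflow.contrib.hooks.gcp_function_hook import GcfHook

# Placeholder connection and API version.
hook = GcfHook(gcp_conn_id='google_cloud_default', api_version='v1')

full_location = '/projects/my-project/location/europe-west1'
function_name = 'projects/my-project/locations/europe-west1/functions/hello-world'

body = {
    'name': function_name,
    'entryPoint': 'helloWorld',
    'runtime': 'nodejs6',
    'httpsTrigger': {},
    # Upload local sources first and use the signed URL returned by generateUploadUrl.
    'sourceUploadUrl': hook.upload_function_zip(parent=full_location,
                                                zip_path='/tmp/hello.zip'),
}

hook.create_new_function(full_location=full_location, body=body)
print(hook.get_function(function_name))
print(hook.list_functions(full_location))
hook.delete_function(function_name)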
35 changes: 18 additions & 17 deletions airflow/contrib/operators/gcp_function_operator.py
@@ -80,28 +80,29 @@ def _validate_max_instances(value):

class GcfFunctionDeployOperator(BaseOperator):
"""
Create a function in Google Cloud Functions.
Creates a function in Google Cloud Functions.

:param project_id: Project ID that the operator works on
:param project_id: Google Cloud Platform Project ID where the function should
be created.
:type project_id: str
:param location: Region where the operator operates on
:param location: Google Cloud Platform region where the function should be created.
:type location: str
:param body: Body of the cloud function definition. The body must be a CloudFunction
dictionary as described in:
:param body: Body of the Cloud Functions definition. The body must be a
Cloud Functions dictionary as described in:
https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions
(note that different API versions require different
variants of the CloudFunction dictionary)
. Different API versions require different variants of the Cloud Functions
dictionary.
:type body: dict or google.cloud.functions.v1.CloudFunction
:param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform.
:param gcp_conn_id: The connection ID to use to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: Version of the API used (for example v1).
:param api_version: API version used (for example v1 or v1beta1).
:type api_version: str
:param zip_path: Path to zip file containing source code of the function. If it is
set, then sourceUploadUrl should not be specified in the body (or it should
be empty), then the zip file will be uploaded using upload URL generated
via generateUploadUrl from cloud functions API
:param zip_path: Path to zip file containing source code of the function. If the path
is set, the sourceUploadUrl should not be specified in the body or it should
be empty. Then the zip file will be uploaded using the upload URL generated
via generateUploadUrl from the Cloud Functions API.
:type zip_path: str
:param validate_body: If set to False, no body validation is performed.
:param validate_body: If set to False, body validation is not performed.
:type validate_body: bool
"""

@@ -265,14 +266,14 @@ def preprocess_body(self):

class GcfFunctionDeleteOperator(BaseOperator):
"""
Delete a function with specified name from Google Cloud Functions.
Deletes the specified function from Google Cloud Functions.

:param name: A fully-qualified function name, matching
the pattern: `^projects/[^/]+/locations/[^/]+/functions/[^/]+$`
:type name: str
:param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform.
:param gcp_conn_id: The connection ID to use to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: Version of the API used (for example v1).
:param api_version: API version used (for example v1 or v1beta1).
:type api_version: str
"""

66 changes: 36 additions & 30 deletions docs/howto/operator.rst
@@ -179,15 +179,18 @@ to delete a function from Google Cloud Functions.

Troubleshooting
"""""""""""""""
If you want to run or deploy an operator using a service account and get “forbidden 403”
errors, it means that your service account does not have the correct
Cloud IAM permissions.

In case you want to run deploy operator using a service account and get "forbidden 403"
errors, it means that your service account has not enough permissions set via IAM.
1. Assign your Service Account the Cloud Functions Developer role.
2. Grant the user the Cloud IAM Service Account User role on the Cloud Functions runtime
service account.

The typical way of assigning Cloud IAM permissions with `gcloud` is
shown below. Just replace PROJECT_ID with the ID of your Google Cloud Platform project
and SERVICE_ACCOUNT_EMAIL with the email ID of your service account.

* First you need to Assign your Service Account "Cloud Functions Developer" role
* Make sure you grant the user the IAM Service Account User role on the Cloud Functions
Runtime service account. Typical way of doing it with gcloud is shown below - just
replace PROJECT_ID with ID of your project and SERVICE_ACCOUNT_EMAIL with the email id
of your service account.

.. code-block:: bash

@@ -205,40 +208,40 @@ GcfFunctionDeployOperator
Use the :class:`~airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator`
to deploy a function from Google Cloud Functions.

The examples below use Airflow variables defined in order to show various variants and
combinations of default_args you can use. The variables are defined as follows:
The following examples of Airflow variables show various variants and combinations
of default_args that you can use. The variables are defined as follows:

.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
:language: python
:start-after: [START howto_operator_gcf_deploy_variables]
:end-before: [END howto_operator_gcf_deploy_variables]
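
As a rough illustration only, such variables might be read with `models.Variable.get`
and given defaults; the names and values below are hypothetical and may differ from the
actual example DAG:

.. code-block:: python

    from airflow import models

    GCP_PROJECT_ID = models.Variable.get('GCP_PROJECT_ID', default_var='example-project')
    GCF_LOCATION = models.Variable.get('GCF_LOCATION', default_var='europe-west1')
    GCF_ENTRYPOINT = models.Variable.get('GCF_ENTRYPOINT', default_var='helloWorld')
    GCF_SOURCE_ARCHIVE_URL = models.Variable.get('GCF_SOURCE_ARCHIVE_URL', default_var='')
    GCF_ZIP_PATH = models.Variable.get('GCF_ZIP_PATH', default_var='')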

With those variables one can define body of the request:
With those variables you can define the body of the request:

.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
:language: python
:start-after: [START howto_operator_gcf_deploy_body]
:end-before: [END howto_operator_gcf_deploy_body]
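
For orientation, a hypothetical body built from such variables could look like the
following; the field names come from the CloudFunction resource, while the values are
placeholders:

.. code-block:: python

    body = {
        'name': 'projects/{}/locations/{}/functions/hello-world'.format(
            GCP_PROJECT_ID, GCF_LOCATION),
        'entryPoint': GCF_ENTRYPOINT,
        'runtime': 'nodejs6',
        'httpsTrigger': {},
        'sourceArchiveUrl': GCF_SOURCE_ARCHIVE_URL,
    }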

The default_args dictionary when you create DAG can be used to pass body and other
arguments:
When you create a DAG, the default_args dictionary can be used to pass the body and
other arguments:

.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
:language: python
:start-after: [START howto_operator_gcf_deploy_args]
:end-before: [END howto_operator_gcf_deploy_args]
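
Continuing the hypothetical sketch above, such a default_args dictionary could be passed
to the DAG like this (the argument names follow the operator documentation, the values
are placeholders):

.. code-block:: python

    from airflow import models
    from airflow.utils import dates

    default_args = {
        'start_date': dates.days_ago(1),
        'project_id': GCP_PROJECT_ID,
        'location': GCF_LOCATION,
        'body': body,
        'validate_body': True,
    }

    with models.DAG('example_gcp_function',
                    default_args=default_args,
                    schedule_interval=None) as dag:
        ...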

Note that the neither the body nor default args are complete in the above examples.
Depending on the variables set there might be different variants on how to pass
source code related fields. Currently you can pass either
`sourceArchiveUrl`, `sourceRepository` or `sourceUploadUrl` as described in
Note that neither the body nor the default args are complete in the above examples.
Depending on the variables set, the source-code-related fields can be passed in
different ways. Currently, you can pass either sourceArchiveUrl, sourceRepository
or sourceUploadUrl as described in the
`CloudFunction API specification <https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions#CloudFunction>`_.
Additionally default_args might contain `zip_path` parameter to run extra step
of uploading the source code before deploying it. In the last case you also need to
Additionally, default_args might contain the zip_path parameter to run the extra step of
uploading the source code before deploying it. In that case, you also need to
provide an empty `sourceUploadUrl` parameter in the body.
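
As a rough, hypothetical illustration of that last combination (the bundled example DAG
implements a fuller version of this logic, shown next), the body and operator arguments
from the sketch above could be combined as follows:

.. code-block:: python

    body['sourceUploadUrl'] = ''          # must be empty when zip_path is used
    body.pop('sourceArchiveUrl', None)    # only one source field may be set

    deploy_from_zip = GcfFunctionDeployOperator(
        task_id='gcf_deploy_from_zip',
        project_id=GCP_PROJECT_ID,
        location=GCF_LOCATION,
        body=body,
        zip_path=GCF_ZIP_PATH,
    )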

Example logic of setting the source code related fields based on variables defined above
is shown here:
Based on the variables defined above, example logic of setting the source code
related fields is shown here:

.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
:language: python
@@ -255,14 +258,17 @@ The code to create the operator:
Troubleshooting
"""""""""""""""

In case you want to run deploy operator using a service account and get "forbidden 403"
errors, it means that your service account has not enough permissions set via IAM.
If you want to run or deploy an operator using a service account and get “forbidden 403”
errors, it means that your service account does not have the correct
Cloud IAM permissions.

1. Assign your Service Account the Cloud Functions Developer role.
2. Grant the user the Cloud IAM Service Account User role on the Cloud Functions runtime
service account.

* First you need to Assign your Service Account "Cloud Functions Developer" role
* Make sure you grant the user the IAM Service Account User role on the Cloud Functions
Runtime service account. Typical way of doing it with gcloud is shown below - just
replace PROJECT_ID with ID of your project and SERVICE_ACCOUNT_EMAIL with the email id
of your service account.
The typical way of assigning Cloud IAM permissions with `gcloud` is
shown below. Just replace PROJECT_ID with the ID of your Google Cloud Platform project
and SERVICE_ACCOUNT_EMAIL with the email ID of your service account.

.. code-block:: bash

@@ -274,6 +280,6 @@

See `Adding the IAM service agent user role to the runtime service <https://cloud.google.com/functions/docs/reference/iam/roles#adding_the_iam_service_agent_user_role_to_the_runtime_service_account>`_ for details

Also make sure that your service account has access to the source code of function
in case it should be downloaded. It might mean that you add Source Repository Viewer
role to the service account in case the source code is in Google Source Repository.
If the source code for your function is in Google Source Repository, make sure that
your service account has the Source Repository Viewer role so that the source code
can be downloaded if necessary.
4 changes: 2 additions & 2 deletions docs/integration.rst
@@ -538,8 +538,8 @@ Cloud Functions
Cloud Functions Operators
"""""""""""""""""""""""""

- :ref:`GcfFunctionDeployOperator` : deploy Google Cloud Function to the cloud.
- :ref:`GcfFunctionDeleteOperator` : delete Google Cloud Function in the cloud.
- :ref:`GcfFunctionDeployOperator` : deploy Google Cloud Function to Google Cloud Platform
- :ref:`GcfFunctionDeleteOperator` : delete Google Cloud Function in Google Cloud Platform

.. autoclass:: airflow.contrib.operators.gcp_operator.GCP
