From 09b1c006c27233cf9b8c3d3869af09a747c3b45f Mon Sep 17 00:00:00 2001
From: Jiajie Zhong
Date: Tue, 8 Feb 2022 18:45:31 +0800
Subject: [PATCH] [doc] Improve s3 operator example by adding task upload_keys

---
 airflow/providers/amazon/aws/example_dags/example_s3_bucket.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_bucket.py b/airflow/providers/amazon/aws/example_dags/example_s3_bucket.py
index 9dbbe4ae85ec9..538538d6c0ea8 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3_bucket.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3_bucket.py
@@ -28,6 +28,7 @@
 
 BUCKET_NAME = os.environ.get('BUCKET_NAME', 'test-airflow-12345')
 
+# [START howto_operator_s3_bucket]
 @task(task_id="s3_bucket_dag_add_keys_to_bucket")
 def upload_keys():
     """This is a python callback to add keys into the s3 bucket"""
@@ -41,7 +42,6 @@ def upload_keys():
         )
 
 
-# [START howto_operator_s3_bucket]
 with DAG(
     dag_id='s3_bucket_dag',
     schedule_interval=None,
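
For context, below is a minimal sketch of how the example DAG reads once the docs marker is moved: the [START howto_operator_s3_bucket] snippet now begins at the upload_keys task instead of at the with DAG(...) block, so the rendered documentation includes the upload step. Only the lines visible in the diff context above are verbatim from this patch; the S3Hook.load_string body, the import paths, the start_date/catchup values, the bucket-operator task ids, and the create >> upload >> delete ordering are illustrative assumptions based on the surrounding example DAG, not part of this change.

import os
from datetime import datetime

from airflow.decorators import task
from airflow.models.dag import DAG
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.operators.s3 import (
    S3CreateBucketOperator,
    S3DeleteBucketOperator,
)

BUCKET_NAME = os.environ.get('BUCKET_NAME', 'test-airflow-12345')


# [START howto_operator_s3_bucket]
@task(task_id="s3_bucket_dag_add_keys_to_bucket")
def upload_keys():
    """This is a python callback to add keys into the s3 bucket"""
    s3_hook = S3Hook()
    for i in range(0, 3):
        # load_string writes an in-memory string to the given S3 key;
        # the key names and payload here are illustrative assumptions.
        s3_hook.load_string(
            string_data="input",
            key=f"path/data{i}",
            bucket_name=BUCKET_NAME,
        )


with DAG(
    dag_id='s3_bucket_dag',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    catchup=False,
) as dag:
    create_bucket = S3CreateBucketOperator(
        task_id='s3_bucket_dag_create',
        bucket_name=BUCKET_NAME,
    )
    delete_bucket = S3DeleteBucketOperator(
        task_id='s3_bucket_dag_delete',
        bucket_name=BUCKET_NAME,
        force_delete=True,
    )
    # The upload task runs between bucket creation and deletion.
    create_bucket >> upload_keys() >> delete_bucket
# [END howto_operator_s3_bucket]

The point of relocating the marker is that the docs-extraction tooling copies everything between the START and END comments, so the published how-to now shows a complete create/upload/delete workflow rather than starting at the DAG definition.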