diff --git a/airflow/providers/snowflake/example_dags/__init__.py b/airflow/providers/snowflake/example_dags/__init__.py
deleted file mode 100644
index 13a83393a9124..0000000000000
--- a/airflow/providers/snowflake/example_dags/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst
index 46693045d4bc2..6a583468585d0 100644
--- a/docs/apache-airflow-providers-snowflake/index.rst
+++ b/docs/apache-airflow-providers-snowflake/index.rst
@@ -39,7 +39,7 @@ Content
     :maxdepth: 1
     :caption: Resources

-    Example DAGs <https://github.com/apache/airflow/tree/main/airflow/providers/snowflake/example_dags>
+    Example DAGs <https://github.com/apache/airflow/tree/main/tests/system/providers/snowflake>
     PyPI Repository <https://pypi.org/project/apache-airflow-providers-snowflake/>
     Installing from sources <installing-providers-from-sources>
diff --git a/docs/apache-airflow-providers-snowflake/operators/s3_to_snowflake.rst b/docs/apache-airflow-providers-snowflake/operators/s3_to_snowflake.rst
index f7e6111399d5b..a2b7bb762bc8c 100644
--- a/docs/apache-airflow-providers-snowflake/operators/s3_to_snowflake.rst
+++ b/docs/apache-airflow-providers-snowflake/operators/s3_to_snowflake.rst
@@ -36,7 +36,7 @@ a file format (see `docs
diff --git a/airflow/providers/snowflake/example_dags/example_snowflake.py b/tests/system/providers/snowflake/example_snowflake.py
--- a/airflow/providers/snowflake/example_dags/example_snowflake.py
+++ b/tests/system/providers/snowflake/example_snowflake.py
-(
-    snowflake_op_sql_str
-    >> [
-        snowflake_op_with_params,
-        snowflake_op_sql_list,
-        snowflake_op_template_file,
-        copy_into_table,
-        snowflake_op_sql_multiple_stmts,
-    ]
-    >> slack_report
-)
+) as dag:
+    # [START snowflake_example_dag]
+    snowflake_op_sql_str = SnowflakeOperator(
+        task_id='snowflake_op_sql_str',
+        sql=CREATE_TABLE_SQL_STRING,
+        warehouse=SNOWFLAKE_WAREHOUSE,
+        database=SNOWFLAKE_DATABASE,
+        schema=SNOWFLAKE_SCHEMA,
+        role=SNOWFLAKE_ROLE,
+    )
+
+    snowflake_op_with_params = SnowflakeOperator(
+        task_id='snowflake_op_with_params',
+        sql=SQL_INSERT_STATEMENT,
+        parameters={"id": 56},
+        warehouse=SNOWFLAKE_WAREHOUSE,
+        database=SNOWFLAKE_DATABASE,
+        schema=SNOWFLAKE_SCHEMA,
+        role=SNOWFLAKE_ROLE,
+    )
+
+    snowflake_op_sql_list = SnowflakeOperator(task_id='snowflake_op_sql_list', sql=SQL_LIST)
+
+    snowflake_op_sql_multiple_stmts = SnowflakeOperator(
+        task_id='snowflake_op_sql_multiple_stmts',
+        sql=SQL_MULTIPLE_STMTS,
+    )
+
+    snowflake_op_template_file = SnowflakeOperator(
+        task_id='snowflake_op_template_file',
+        sql='/path/to/sql/<filename>.sql',
+    )
+
+    # [END howto_operator_snowflake]
+
+    # [START howto_operator_s3_to_snowflake]
+
+    copy_into_table = S3ToSnowflakeOperator(
+        task_id='copy_into_table',
+        s3_keys=[S3_FILE_PATH],
+        table=SNOWFLAKE_SAMPLE_TABLE,
+        schema=SNOWFLAKE_SCHEMA,
+        stage=SNOWFLAKE_STAGE,
+        file_format="(type = 'CSV',field_delimiter = ';')",
+    )
+
+    # [END howto_operator_s3_to_snowflake]
+
+    # [START howto_operator_snowflake_to_slack]
+
+    slack_report = SnowflakeToSlackOperator(
+        task_id="slack_report",
+        sql=SNOWFLAKE_SLACK_SQL,
+        slack_message=SNOWFLAKE_SLACK_MESSAGE,
+        slack_conn_id=SLACK_CONN_ID,
+    )
+
+    # [END howto_operator_snowflake_to_slack]
+
+    (
+        snowflake_op_sql_str
+        >> [
+            snowflake_op_with_params,
+            snowflake_op_sql_list,
+            snowflake_op_template_file,
+            copy_into_table,
+            snowflake_op_sql_multiple_stmts,
+        ]
+        >> slack_report
+    )
+    # [END snowflake_example_dag]
+
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)