From 4b8e4a2ca5f2b904f47398182b6076f4d61b8d6e Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sun, 9 Feb 2025 13:25:19 +0100
Subject: [PATCH] Removed the unused providers distribution

This is the first stage of a set of cleanup steps that allows us to remove
the "intermediate" providers distribution from the Airflow code and replace
it fully with individual provider distributions - each already with its own
`pyproject.toml` file and (once the work is complete) a completely separate
distribution from Airflow, with no implicit dependencies between unrelated
distributions.

A number of other changes are still needed, but this one focuses only on
removing all references to the "umbrella" `providers` distribution and on
the consequences of removing it.

The changes implemented in this PR:

* There are no separate "providers" system tests - each provider has its
  own system tests, and there is no common "generic" empty providers
  system test.
* Integration tests are moved to the respective providers under the
  `integration` package inside the `tests` directory.
* (nearly) empty __init__.py files are added in the `tests` directories of
  providers - this way "tests" becomes just a directory and the root for
  all tests per provider, rather than a Python package of its own. This
  allows using "from integration.PROVIDER import" and
  "from system.PROVIDER import" rather than importing them from the root
  of the whole airflow project. The "(nearly)" is because we need to
  handle multiple "system", "system.apache" and other import locations.
* Removed references to the generic "providers/" package which were
  scheduled for removal after all providers are moved to the new
  structure.
* A few remaining references/links to the old "providers/src" and
  "providers/tests" have been fixed.
* The "conftest.py" files in all providers are trimmed down - the code
  that stores ignored deprecation warnings has been moved to the
  test_common pytest_plugin. This removes 90+ duplicated
  deprecation_warnings retrieval snippets while keeping the warnings
  per provider in the provider's distribution.
* The "moving_providers" scripts are removed. They have done their job and
  are not needed any more - we keep them in history.
* The __init__.py files are automatically checked and properly updated in
  provider folders - in order to properly handle the path extension
  mechanism (see the sketch below).
* The www tests that were using the FAB permission model are moved to the
  FAB provider tests.
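For illustration, a minimal sketch of the resulting per-provider test
layout and import style, using the `apache.kafka` provider as an example
(the exact file set varies per provider, and the path-extension line shown
is the common pkgutil-style idiom - an assumption about the generated
content, not a verbatim quote of it):

    providers/apache/kafka/tests/
        __init__.py                   # (nearly) empty - makes "tests" the import root
        integration/
            __init__.py
            apache/
                # shared namespace packages extend their path so several
                # providers can contribute to e.g. "system.apache":
                # __path__ = __import__("pkgutil").extend_path(__path__, __name__)
                __init__.py
                kafka/
                    hooks/
                        test_consumer.py
        system/
            __init__.py
            apache/
                __init__.py
                kafka/
                    ...

    # With the per-provider "tests" directory on sys.path, test code
    # imports are rooted at "integration" / "system", not at "tests":
    from integration.apache.kafka.hooks import test_consumer
    from system.apache import kafka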
--- .../workflows/integration-system-tests.yml | 3 +- .github/workflows/special-tests.yml | 2 +- .pre-commit-config.yaml | 8 +- PROVIDERS.rst | 2 +- contributing-docs/07_local_virtualenv.rst | 26 +- contributing-docs/testing/system_tests.rst | 8 +- contributing-docs/testing/unit_tests.rst | 6 +- dev/README_AIRFLOW3_DEV.md | 2 +- .../commands/testing_commands.py | 2 +- .../src/airflow_breeze/utils/run_tests.py | 75 +- .../airflow_breeze/utils/selective_checks.py | 9 - .../tests/test_pytest_args_for_test_types.py | 46 +- dev/breeze/tests/test_run_test_args.py | 17 +- dev/breeze/tests/test_selective_checks.py | 12 +- dev/example_dags/README.md | 34 - dev/example_dags/update_example_dags_paths.py | 118 --- dev/moving_providers/README.md | 120 --- dev/moving_providers/move_providers.py | 757 ------------------ docs/apache-airflow-providers/index.rst | 2 +- docs/conf.py | 25 +- docs/exts/docs_build/docs_builder.py | 2 +- docs/exts/exampleinclude.py | 6 +- docs/spelling_wordlist.txt | 1 + .../MANAGING_PROVIDERS_LIFECYCLE.rst | 51 +- providers/airbyte/tests/conftest.py | 13 - .../{ => airbyte/tests/system}/__init__.py | 0 providers/alibaba/docs/index.rst | 2 +- providers/alibaba/tests/conftest.py | 13 - .../tests/system}/__init__.py | 0 .../setup/amazon-verified-permissions.rst | 4 +- .../amazon/docs/executors/batch-executor.rst | 2 +- .../amazon/docs/executors/ecs-executor.rst | 2 +- .../aws/auth_manager/aws_auth_manager.py | 2 +- .../providers/amazon/aws/triggers/README.md | 2 +- providers/amazon/tests/conftest.py | 13 - .../aws/auth_manager/router}/__init__.py | 0 .../amazon/aws/system/utils/test_helpers.py | 6 +- .../hooks => amazon/tests/system}/__init__.py | 1 + .../amazon/tests/system/amazon/README.md | 14 +- .../system/amazon/aws/example_appflow.py | 3 +- .../system/amazon/aws/example_appflow_run.py | 2 +- .../tests/system/amazon/aws/example_athena.py | 2 +- .../amazon/aws/example_azure_blob_to_s3.py | 3 +- .../tests/system/amazon/aws/example_batch.py | 12 +- .../system/amazon/aws/example_bedrock.py | 2 +- .../example_bedrock_retrieve_and_generate.py | 2 +- .../amazon/aws/example_cloudformation.py | 3 +- .../system/amazon/aws/example_comprehend.py | 3 +- .../example_comprehend_document_classifier.py | 3 +- .../system/amazon/aws/example_datasync.py | 2 +- .../tests/system/amazon/aws/example_dms.py | 4 +- .../amazon/aws/example_dms_serverless.py | 2 +- .../system/amazon/aws/example_dynamodb.py | 2 +- .../amazon/aws/example_dynamodb_to_s3.py | 2 +- .../tests/system/amazon/aws/example_ec2.py | 2 +- .../tests/system/amazon/aws/example_ecs.py | 2 +- .../system/amazon/aws/example_ecs_fargate.py | 2 +- .../amazon/aws/example_eks_templated.py | 3 +- .../example_eks_with_fargate_in_one_step.py | 5 +- .../aws/example_eks_with_fargate_profile.py | 5 +- .../example_eks_with_nodegroup_in_one_step.py | 4 +- .../amazon/aws/example_eks_with_nodegroups.py | 4 +- .../tests/system/amazon/aws/example_emr.py | 2 +- .../system/amazon/aws/example_emr_eks.py | 2 +- .../aws/example_emr_notebook_execution.py | 3 +- .../amazon/aws/example_emr_serverless.py | 2 +- .../system/amazon/aws/example_eventbridge.py | 3 +- .../system/amazon/aws/example_ftp_to_s3.py | 3 +- .../system/amazon/aws/example_gcs_to_s3.py | 3 +- .../amazon/aws/example_glacier_to_gcs.py | 2 +- .../tests/system/amazon/aws/example_glue.py | 2 +- .../amazon/aws/example_glue_data_quality.py | 3 +- ...e_glue_data_quality_with_recommendation.py | 3 +- .../amazon/aws/example_glue_databrew.py | 2 +- .../aws/example_google_api_sheets_to_s3.py | 3 +- 
.../aws/example_google_api_youtube_to_s3.py | 2 +- .../amazon/aws/example_hive_to_dynamodb.py | 3 +- .../system/amazon/aws/example_http_to_s3.py | 3 +- .../aws/example_imap_attachment_to_s3.py | 3 +- .../amazon/aws/example_kinesis_analytics.py | 2 +- .../tests/system/amazon/aws/example_lambda.py | 2 +- .../system/amazon/aws/example_local_to_s3.py | 3 +- .../system/amazon/aws/example_mongo_to_s3.py | 3 +- .../tests/system/amazon/aws/example_mwaa.py | 3 +- .../system/amazon/aws/example_neptune.py | 3 +- .../system/amazon/aws/example_quicksight.py | 2 +- .../system/amazon/aws/example_rds_event.py | 2 +- .../system/amazon/aws/example_rds_export.py | 3 +- .../system/amazon/aws/example_rds_instance.py | 3 +- .../system/amazon/aws/example_rds_snapshot.py | 3 +- .../system/amazon/aws/example_redshift.py | 3 +- .../aws/example_redshift_s3_transfers.py | 3 +- .../tests/system/amazon/aws/example_s3.py | 3 +- .../amazon/aws/example_s3_to_dynamodb.py | 2 +- .../system/amazon/aws/example_s3_to_ftp.py | 3 +- .../system/amazon/aws/example_s3_to_sftp.py | 3 +- .../system/amazon/aws/example_s3_to_sql.py | 3 +- .../system/amazon/aws/example_sagemaker.py | 2 +- .../amazon/aws/example_sagemaker_endpoint.py | 4 +- .../amazon/aws/example_sagemaker_notebook.py | 3 +- .../amazon/aws/example_sagemaker_pipeline.py | 4 +- .../amazon/aws/example_salesforce_to_s3.py | 3 +- .../system/amazon/aws/example_sftp_to_s3.py | 3 +- .../tests/system/amazon/aws/example_sns.py | 2 +- .../system/amazon/aws/example_sql_to_s3.py | 3 +- .../tests/system/amazon/aws/example_sqs.py | 3 +- .../amazon/aws/example_step_functions.py | 3 +- .../amazon/aws/tests/test_aws_auth_manager.py | 2 +- .../tests/system/amazon/aws/utils/__init__.py | 2 +- .../beam/docs/_api/tests/system}/__init__.py | 1 + .../_api/tests/system/apache}/__init__.py | 2 +- .../airflow/providers/apache/beam/README.md | 14 +- providers/apache/beam/tests/conftest.py | 13 - .../beam/tests/system}/__init__.py | 2 +- .../beam/tests/system/apache/__init__.py | 17 + .../tests/system/apache/beam/example_beam.py | 9 +- .../apache/beam/example_beam_java_flink.py | 9 +- .../apache/beam/example_beam_java_spark.py | 9 +- .../tests/system/apache/beam/example_go.py | 9 +- .../system/apache/beam/example_go_dataflow.py | 13 +- .../apache/beam/example_java_dataflow.py | 9 +- .../system/apache/beam/example_python.py | 9 +- .../apache/beam/example_python_async.py | 9 +- .../apache/beam/example_python_dataflow.py | 13 +- providers/apache/cassandra/tests/conftest.py | 13 - .../cassandra/tests/integration/__init__.py | 17 + .../tests/integration/apache/__init__.py | 17 + .../integration/apache/cassandra}/__init__.py | 0 .../apache/cassandra/hooks}/__init__.py | 0 .../apache/cassandra/hooks/test_cassandra.py | 0 .../apache/cassandra/tests/system/__init__.py | 17 + .../cassandra/tests/system/apache/__init__.py | 17 + providers/apache/drill/tests/conftest.py | 13 - .../drill/tests/integration/__init__.py | 17 + .../tests/integration/apache/__init__.py | 17 + .../integration/apache/drill}/__init__.py | 0 .../apache/drill/hooks}/__init__.py | 0 .../apache/drill/hooks/test_drill.py | 0 .../apache/drill/operators}/__init__.py | 0 .../apache/drill/tests/system/__init__.py | 17 + .../drill/tests/system/apache/__init__.py | 17 + providers/apache/druid/tests/conftest.py | 13 - .../apache/druid/tests/system/__init__.py | 17 + .../druid/tests/system/apache/__init__.py | 17 + providers/apache/flink/docs/index.rst | 2 +- providers/apache/flink/tests/conftest.py | 13 - providers/apache/hdfs/tests/conftest.py | 
13 - providers/apache/hive/tests/conftest.py | 13 - .../apache/hive/tests/integration/__init__.py | 17 + .../hive/tests/integration/apache/__init__.py | 17 + .../integration/apache/hive}/__init__.py | 0 .../apache/hive/transfers}/__init__.py | 0 .../hive/transfers/test_mssql_to_hive.py | 0 .../apache/hive/transfers/test_s3_to_hive.py | 5 +- .../apache/hive/tests/system/__init__.py | 17 + .../hive/tests/system/apache/__init__.py | 17 + providers/apache/iceberg/tests/conftest.py | 13 - .../apache/iceberg/tests/system/__init__.py | 17 + .../iceberg/tests/system/apache/__init__.py | 17 + providers/apache/impala/tests/conftest.py | 13 - providers/apache/kafka/tests/conftest.py | 13 - .../kafka/tests/integration/__init__.py | 17 + .../tests/integration/apache/__init__.py | 17 + .../integration/apache/kafka}/__init__.py | 0 .../apache/kafka/hooks}/__init__.py | 0 .../apache/kafka/hooks/test_admin_client.py | 0 .../apache/kafka/hooks/test_consumer.py | 0 .../apache/kafka/hooks/test_producer.py | 0 .../apache/kafka/operators}/__init__.py | 0 .../apache/kafka/operators/test_consume.py | 2 +- .../apache/kafka/operators/test_produce.py | 2 +- .../apache/kafka/sensors}/__init__.py | 0 .../apache/kafka/triggers}/__init__.py | 0 .../kafka/triggers/test_await_message.py | 2 +- .../apache/kafka/tests/system/__init__.py | 17 + .../kafka/tests/system/apache/__init__.py | 17 + providers/apache/kylin/tests/conftest.py | 13 - .../apache/kylin/tests/system/__init__.py | 17 + .../kylin/tests/system/apache/__init__.py | 17 + providers/apache/livy/tests/conftest.py | 13 - .../apache/livy/tests/system/__init__.py | 17 + .../livy/tests/system/apache/__init__.py | 17 + providers/apache/pig/tests/conftest.py | 13 - providers/apache/pig/tests/system/__init__.py | 17 + .../pig/tests/system/apache/__init__.py | 17 + providers/apache/pinot/tests/conftest.py | 13 - .../pinot/tests/integration/__init__.py | 17 + .../tests/integration/apache/__init__.py | 17 + .../integration/apache/pinot/__init__.py | 17 + .../apache/pinot/hooks}/__init__.py | 0 .../apache/pinot/hooks/test_pinot.py | 0 .../apache/pinot/tests/system/__init__.py | 17 + .../pinot/tests/system/apache/__init__.py | 17 + providers/apache/spark/tests/conftest.py | 13 - .../apache/spark/tests/system/__init__.py | 17 + .../spark/tests/system/apache/__init__.py | 17 + providers/apprise/tests/conftest.py | 13 - providers/arangodb/tests/conftest.py | 13 - .../airflow/providers/asana/hooks/asana.py | 1 - providers/asana/tests/conftest.py | 13 - providers/asana/tests/system/__init__.py | 17 + providers/atlassian/jira/tests/conftest.py | 13 - providers/celery/tests/conftest.py | 13 - providers/cloudant/tests/conftest.py | 13 - providers/cncf/kubernetes/docs/index.rst | 2 +- .../backcompat/backwards_compat_converters.py | 2 +- providers/cncf/kubernetes/tests/conftest.py | 13 - .../cncf/kubernetes/tests/system/__init__.py | 17 + .../kubernetes/tests/system/cncf/__init__.py | 17 + .../cncf/kubernetes/example_kubernetes.py | 3 +- .../kubernetes/example_kubernetes_async.py | 3 +- .../kubernetes/example_kubernetes_kueue.py | 3 +- providers/cohere/tests/conftest.py | 13 - providers/cohere/tests/system/__init__.py | 17 + providers/common/compat/tests/conftest.py | 13 - providers/common/io/tests/conftest.py | 13 - providers/common/io/tests/system/__init__.py | 17 + .../common/io/tests/system/common/__init__.py | 17 + providers/common/sql/tests/conftest.py | 13 - providers/common/sql/tests/system/__init__.py | 17 + .../sql/tests/system/common/__init__.py | 17 + 
providers/databricks/docs/index.rst | 2 +- providers/databricks/tests/conftest.py | 13 - providers/databricks/tests/system/__init__.py | 17 + providers/datadog/tests/conftest.py | 13 - providers/dbt/cloud/tests/conftest.py | 13 - providers/dbt/cloud/tests/system/__init__.py | 17 + .../dbt/cloud/tests/system/dbt/__init__.py | 17 + providers/dingding/tests/conftest.py | 13 - providers/dingding/tests/system/__init__.py | 17 + providers/discord/tests/conftest.py | 13 - providers/docker/tests/conftest.py | 10 - providers/docker/tests/system/__init__.py | 17 + .../system/docker/example_docker_copy_data.py | 3 +- providers/edge/docs/install_on_windows.rst | 2 +- .../providers/edge/models/edge_worker.py | 10 +- providers/edge/tests/conftest.py | 13 - .../elasticsearch/hooks/elasticsearch.py | 2 +- providers/elasticsearch/tests/conftest.py | 13 - .../elasticsearch/tests/system/__init__.py | 17 + providers/exasol/tests/conftest.py | 13 - providers/fab/tests/conftest.py | 13 - .../api_endpoints/api_connexion_utils.py | 1 - .../fab/auth_manager/models/test_db.py | 2 +- .../provider_tests/fab/www/views/conftest.py | 2 +- .../fab/www/views/test_views_acl.py | 617 +++++++++++++- providers/facebook/tests/conftest.py | 13 - providers/ftp/tests/conftest.py | 13 - providers/ftp/tests/system/__init__.py | 17 + providers/github/tests/conftest.py | 13 - providers/github/tests/system/__init__.py | 17 + .../tests/system/github/example_github.py | 3 +- providers/google/docs/example-dags.rst | 12 +- .../google/docs/operators/cloud/index.rst | 2 +- providers/google/pyproject.toml | 3 +- .../providers/google/get_provider_info.py | 5 +- providers/google/tests/conftest.py | 13 - .../tests/deprecations_ignore.yml | 11 - .../google/tests/integration/__init__.py | 17 + .../tests/integration/google}/__init__.py | 0 .../integration/google/cloud}/__init__.py | 0 .../google/cloud/transfers}/__init__.py | 0 .../cloud/transfers/test_bigquery_to_mssql.py | 0 .../cloud/transfers/test_mssql_to_gcs.py | 0 .../cloud/transfers/test_trino_to_gcs.py | 0 .../google/cloud/hooks/test_cloud_sql.py | 3 +- .../cloud/operators/test_dataprep_system.py | 2 +- providers/google/tests/system/__init__.py | 17 + .../google/tests/system/google/README.md | 26 +- .../tests/system/google/ads/example_ads.py | 3 +- .../azure/example_azure_fileshare_to_gcs.py | 3 +- .../cloud/bigquery/example_bigquery_dts.py | 5 +- .../cloud/bigquery/example_bigquery_jobs.py | 5 +- .../bigquery/example_bigquery_operations.py | 3 +- .../bigquery/example_bigquery_queries.py | 5 +- .../example_bigquery_queries_async.py | 3 +- .../example_bigquery_queries_location.py | 3 +- .../cloud/bigquery/example_bigquery_tables.py | 5 +- .../cloud/bigquery/example_bigquery_to_gcs.py | 3 +- .../bigquery/example_bigquery_to_gcs_async.py | 5 +- .../bigquery/example_bigquery_transfer.py | 5 +- .../google/cloud/bigtable/example_bigtable.py | 4 +- .../cloud/cloud_batch/example_cloud_batch.py | 4 +- .../cloud/cloud_build/example_cloud_build.py | 2 +- .../example_cloud_build_trigger.py | 2 +- .../cloud_functions/example_functions.py | 3 +- .../example_cloud_memorystore_memcached.py | 3 +- .../example_cloud_memorystore_redis.py | 4 +- .../cloud/cloud_run/example_cloud_run.py | 5 +- .../cloud_run/example_cloud_run_service.py | 5 +- .../cloud/cloud_sql/example_cloud_sql.py | 3 +- .../cloud_sql/example_cloud_sql_query.py | 4 +- .../cloud_sql/example_cloud_sql_query_iam.py | 2 +- .../cloud_sql/example_cloud_sql_query_ssl.py | 2 +- .../google/cloud/compute/example_compute.py | 3 +- 
.../cloud/compute/example_compute_igm.py | 3 +- .../cloud/compute/example_compute_ssh.py | 3 +- .../compute/example_compute_ssh_os_login.py | 3 +- .../compute/example_compute_ssh_parallel.py | 3 +- .../example_dlp_deidentify_content.py | 4 +- .../example_dlp_info_types.py | 6 +- .../example_dlp_inspect_template.py | 4 +- .../data_loss_prevention/example_dlp_job.py | 4 +- .../example_dlp_job_trigger.py | 3 +- .../dataflow/example_dataflow_template.py | 3 +- .../cloud/dataflow/example_dataflow_yaml.py | 3 +- .../google/cloud/dataform/example_dataform.py | 4 +- .../cloud/datafusion/example_datafusion.py | 3 +- .../google/cloud/dataplex/example_dataplex.py | 3 +- .../dataplex/example_dataplex_catalog.py | 3 +- .../cloud/dataplex/example_dataplex_dp.py | 8 +- .../cloud/dataplex/example_dataplex_dq.py | 8 +- .../google/cloud/dataprep/example_dataprep.py | 3 +- .../cloud/dataproc/example_dataproc_batch.py | 4 +- .../example_dataproc_batch_deferrable.py | 4 +- .../example_dataproc_batch_persistent.py | 4 +- ...cluster_create_existing_stopped_cluster.py | 4 +- .../example_dataproc_cluster_deferrable.py | 4 +- .../example_dataproc_cluster_diagnose.py | 4 +- .../example_dataproc_cluster_generator.py | 4 +- .../example_dataproc_cluster_start_stop.py | 4 +- .../example_dataproc_cluster_update.py | 4 +- .../cloud/dataproc/example_dataproc_flink.py | 4 +- .../cloud/dataproc/example_dataproc_gke.py | 4 +- .../cloud/dataproc/example_dataproc_hadoop.py | 4 +- .../cloud/dataproc/example_dataproc_hive.py | 4 +- .../cloud/dataproc/example_dataproc_pig.py | 4 +- .../cloud/dataproc/example_dataproc_presto.py | 4 +- .../dataproc/example_dataproc_pyspark.py | 4 +- .../cloud/dataproc/example_dataproc_spark.py | 4 +- .../dataproc/example_dataproc_spark_async.py | 4 +- .../example_dataproc_spark_deferrable.py | 4 +- .../dataproc/example_dataproc_spark_sql.py | 4 +- .../cloud/dataproc/example_dataproc_sparkr.py | 4 +- .../cloud/dataproc/example_dataproc_trino.py | 4 +- .../dataproc/example_dataproc_workflow.py | 3 +- .../example_dataproc_workflow_deferrable.py | 3 +- .../example_dataproc_metastore.py | 4 +- .../example_dataproc_metastore_backup.py | 3 +- ...ataproc_metastore_hive_partition_sensor.py | 3 +- .../datastore/example_datastore_commit.py | 3 +- .../datastore/example_datastore_query.py | 3 +- .../datastore/example_datastore_rollback.py | 3 +- .../google/cloud/gcs/example_gcs_acl.py | 3 +- .../cloud/gcs/example_gcs_copy_delete.py | 5 +- .../google/cloud/gcs/example_gcs_sensor.py | 3 +- .../cloud/gcs/example_gcs_to_bigquery.py | 5 +- .../gcs/example_gcs_to_bigquery_async.py | 5 +- .../google/cloud/gcs/example_gcs_to_gcs.py | 5 +- .../google/cloud/gcs/example_gcs_to_gdrive.py | 3 +- .../google/cloud/gcs/example_gcs_to_sftp.py | 5 +- .../google/cloud/gcs/example_gcs_transform.py | 5 +- .../gcs/example_gcs_transform_timespan.py | 5 +- .../cloud/gcs/example_gcs_upload_download.py | 5 +- .../google/cloud/gcs/example_gdrive_to_gcs.py | 3 +- .../google/cloud/gcs/example_mssql_to_gcs.py | 4 +- .../google/cloud/gcs/example_mysql_to_gcs.py | 2 +- .../google/cloud/gcs/example_oracle_to_gcs.py | 3 +- .../google/cloud/gcs/example_s3_to_gcs.py | 5 +- .../google/cloud/gcs/example_sftp_to_gcs.py | 5 +- .../google/cloud/gcs/example_trino_to_gcs.py | 3 +- .../example_kubernetes_engine.py | 3 +- .../example_kubernetes_engine_async.py | 3 +- .../life_sciences/example_life_sciences.py | 3 +- .../cloud/ml_engine/example_mlengine.py | 7 +- .../example_natural_language.py | 3 +- .../google/cloud/spanner/example_spanner.py | 3 +- 
.../speech_to_text/example_speech_to_text.py | 4 +- .../cloud/stackdriver/example_stackdriver.py | 3 +- ...mple_cloud_storage_transfer_service_aws.py | 3 +- .../google/cloud/tasks/example_queue.py | 7 +- .../google/cloud/tasks/example_tasks.py | 7 +- .../text_to_speech/example_text_to_speech.py | 3 +- .../cloud/transfers/example_gcs_to_sftp.py | 3 +- .../transfers/example_gdrive_to_local.py | 3 +- .../example_translate_speech.py | 3 +- ..._vertex_ai_auto_ml_forecasting_training.py | 7 +- ...ertex_ai_auto_ml_image_object_detection.py | 5 +- ...xample_vertex_ai_auto_ml_image_training.py | 5 +- ...mple_vertex_ai_auto_ml_tabular_training.py | 7 +- ...xample_vertex_ai_auto_ml_video_tracking.py | 5 +- ...xample_vertex_ai_auto_ml_video_training.py | 5 +- .../example_vertex_ai_batch_prediction_job.py | 7 +- .../example_vertex_ai_custom_container.py | 7 +- .../vertex_ai/example_vertex_ai_custom_job.py | 7 +- ...ple_vertex_ai_custom_job_python_package.py | 7 +- .../vertex_ai/example_vertex_ai_dataset.py | 7 +- .../vertex_ai/example_vertex_ai_endpoint.py | 5 +- ...ple_vertex_ai_hyperparameter_tuning_job.py | 3 +- .../example_vertex_ai_model_service.py | 7 +- .../example_vertex_ai_pipeline_job.py | 7 +- .../example_video_intelligence.py | 3 +- .../vision/example_vision_annotate_image.py | 2 +- .../vision/example_vision_autogenerated.py | 2 +- .../cloud/vision/example_vision_explicit.py | 2 +- .../cloud/workflows/example_workflows.py | 3 +- .../google/tests/system/google/conftest.py | 2 +- .../example_datacatalog_entries.py | 4 +- .../example_datacatalog_search_catalog.py | 4 +- .../example_datacatalog_tag_templates.py | 4 +- .../datacatalog/example_datacatalog_tags.py | 4 +- .../example_analytics_admin.py | 5 +- .../example_campaign_manager.py | 4 +- providers/grpc/tests/conftest.py | 13 - providers/hashicorp/tests/conftest.py | 13 - providers/http/tests/conftest.py | 13 - providers/http/tests/system/__init__.py | 17 + providers/imap/tests/conftest.py | 13 - providers/influxdb/tests/conftest.py | 13 - providers/influxdb/tests/system/__init__.py | 17 + providers/jdbc/tests/conftest.py | 13 - providers/jdbc/tests/system/__init__.py | 17 + providers/jenkins/tests/conftest.py | 13 - providers/jenkins/tests/system/__init__.py | 17 + providers/microsoft/azure/docs/index.rst | 2 +- providers/microsoft/azure/tests/conftest.py | 127 +-- .../provider_tests/microsoft/azure/base.py | 2 +- .../microsoft/azure/hooks/test_msgraph.py | 16 +- .../microsoft/azure/operators/test_msgraph.py | 7 +- .../microsoft/azure/operators/test_powerbi.py | 2 +- .../microsoft/azure/sensors/test_msgraph.py | 2 +- .../microsoft/azure/test_utils.py | 102 +++ .../microsoft/azure/triggers/test_msgraph.py | 14 +- .../microsoft/azure/triggers/test_powerbi.py | 2 +- .../microsoft/azure/tests/system/__init__.py | 17 + .../azure/tests/system/microsoft/__init__.py | 17 + .../microsoft/azure/example_s3_to_wasb.py | 3 +- providers/microsoft/mssql/docs/index.rst | 2 +- providers/microsoft/mssql/tests/conftest.py | 39 - .../mssql/tests/integration/__init__.py | 17 + .../tests/integration/microsoft/__init__.py | 17 + .../integration/microsoft/mssql}/__init__.py | 0 .../microsoft/mssql/hooks}/__init__.py | 0 .../microsoft/mssql/hooks/test_mssql.py | 0 .../microsoft/mssql/hooks/test_mssql.py | 4 +- .../microsoft/mssql/test_utils.py | 39 + .../microsoft/mssql/tests/system/__init__.py | 17 + .../mssql/tests/system/microsoft/__init__.py | 17 + providers/microsoft/psrp/tests/conftest.py | 13 - providers/microsoft/winrm/docs/index.rst | 2 +- 
providers/microsoft/winrm/tests/conftest.py | 13 - .../microsoft/winrm/tests/system/__init__.py | 17 + .../winrm/tests/system/microsoft/__init__.py | 17 + providers/mongo/tests/conftest.py | 13 - providers/mongo/tests/integration/__init__.py | 17 + .../tests/integration/mongo}/__init__.py | 0 .../integration/mongo/sensors}/__init__.py | 0 .../integration/mongo/sensors/test_mongo.py | 0 providers/mysql/tests/conftest.py | 13 - providers/mysql/tests/system/__init__.py | 17 + providers/neo4j/tests/conftest.py | 13 - providers/neo4j/tests/system/__init__.py | 17 + providers/odbc/tests/conftest.py | 13 - providers/odbc/tests/system/__init__.py | 17 + providers/openai/tests/conftest.py | 13 - providers/openai/tests/system/__init__.py | 17 + providers/openfaas/tests/conftest.py | 13 - .../openlineage/docs/guides/developer.rst | 6 +- providers/openlineage/tests/conftest.py | 13 - .../openlineage/tests/integration/__init__.py | 17 + .../integration/openlineage}/__init__.py | 0 .../openlineage/operators}/__init__.py | 0 .../openlineage/tests/system/__init__.py | 17 + .../tests/system/openlineage/conftest.py | 3 +- .../system/openlineage/example_openlineage.py | 3 +- .../example_openlineage_mapped_sensor.py | 3 +- .../system/openlineage/transport/variable.py | 3 +- providers/opensearch/tests/conftest.py | 13 - providers/opensearch/tests/system/__init__.py | 17 + providers/opsgenie/tests/conftest.py | 13 - providers/opsgenie/tests/system/__init__.py | 17 + providers/oracle/pyproject.toml | 2 +- .../providers/oracle/get_provider_info.py | 2 +- providers/oracle/tests/conftest.py | 13 - providers/oracle/tests/system/__init__.py | 17 + providers/pagerduty/tests/conftest.py | 13 - providers/papermill/tests/conftest.py | 13 - providers/papermill/tests/system/__init__.py | 17 + providers/pgvector/tests/conftest.py | 13 - providers/pgvector/tests/system/__init__.py | 17 + providers/pinecone/tests/conftest.py | 13 - providers/pinecone/tests/system/__init__.py | 17 + providers/postgres/tests/conftest.py | 13 - providers/postgres/tests/system/__init__.py | 17 + providers/presto/tests/conftest.py | 13 - providers/presto/tests/system/__init__.py | 17 + providers/pyproject.toml | 93 --- providers/qdrant/tests/conftest.py | 13 - .../qdrant/tests/integration/__init__.py | 17 + .../tests/integration/qdrant}/__init__.py | 0 .../integration/qdrant/hooks}/__init__.py | 0 .../integration/qdrant/hooks/test_qdrant.py | 0 .../integration/qdrant/operators}/__init__.py | 0 .../qdrant/operators/test_qdrant_ingest.py | 0 providers/qdrant/tests/system/__init__.py | 17 + providers/redis/tests/conftest.py | 13 - providers/redis/tests/integration/__init__.py | 17 + .../tests/integration/redis}/__init__.py | 0 .../integration/redis/hooks}/__init__.py | 0 .../integration/redis/hooks/test_redis.py | 0 .../integration/redis/operators}/__init__.py | 0 .../redis/operators/test_redis_publish.py | 0 .../integration/redis/sensors}/__init__.py | 0 .../redis/sensors/test_redis_key.py | 0 .../redis/sensors/test_redis_pub_sub.py | 0 providers/redis/tests/system/__init__.py | 17 + providers/salesforce/tests/conftest.py | 13 - providers/salesforce/tests/system/__init__.py | 17 + providers/samba/tests/conftest.py | 13 - providers/samba/tests/system/__init__.py | 17 + .../system/samba/example_gcs_to_samba.py | 3 +- providers/segment/tests/conftest.py | 13 - providers/sendgrid/tests/conftest.py | 13 - providers/sftp/tests/conftest.py | 13 - providers/sftp/tests/system/__init__.py | 17 + providers/singularity/tests/conftest.py | 13 - 
.../singularity/tests/system/__init__.py | 17 + providers/slack/tests/conftest.py | 13 - providers/slack/tests/system/__init__.py | 17 + providers/smtp/tests/conftest.py | 13 - .../snowflake/operators/snowflake.py | 7 +- providers/snowflake/tests/conftest.py | 13 - providers/snowflake/tests/system/__init__.py | 17 + providers/sqlite/tests/conftest.py | 13 - providers/sqlite/tests/system/__init__.py | 17 + providers/src/README.md | 44 - providers/src/airflow/__init__.py | 21 - providers/src/airflow/providers/.gitignore | 1 - providers/src/airflow/providers/__init__.py | 21 - providers/ssh/tests/conftest.py | 13 - providers/standard/tests/conftest.py | 13 - providers/tableau/tests/conftest.py | 13 - providers/tableau/tests/system/__init__.py | 17 + providers/telegram/tests/conftest.py | 13 - providers/telegram/tests/system/__init__.py | 17 + providers/teradata/tests/conftest.py | 13 - providers/teradata/tests/system/__init__.py | 17 + providers/tests/conftest.py | 68 -- .../integration/redis/sensors/__init__.py | 16 - providers/tests/integration/trino/__init__.py | 16 - .../tests/integration/trino/hooks/__init__.py | 16 - providers/tests/integration/ydb/__init__.py | 16 - .../tests/integration/ydb/hooks/__init__.py | 16 - .../integration/ydb/operators/__init__.py | 16 - providers/tests/system/__init__.py | 16 - providers/tests/system/cncf/__init__.py | 16 - providers/tests/system/conftest.py | 50 -- providers/tests/system/dbt/__init__.py | 16 - providers/tests/system/example_empty.py | 48 -- providers/tests/system/microsoft/__init__.py | 16 - providers/trino/tests/conftest.py | 13 - providers/trino/tests/integration/__init__.py | 17 + .../tests/integration/trino}/__init__.py | 0 .../integration/trino/hooks}/__init__.py | 0 .../integration/trino/hooks/test_trino.py | 0 providers/trino/tests/system/__init__.py | 17 + providers/vertica/tests/conftest.py | 13 - providers/vertica/tests/system/__init__.py | 17 + providers/weaviate/tests/conftest.py | 13 - providers/weaviate/tests/system/__init__.py | 17 + .../example_weaviate_dynamic_mapping_dag.py | 2 +- .../weaviate/example_weaviate_operator.py | 4 +- .../weaviate/example_weaviate_using_hook.py | 4 +- .../example_weaviate_vectorizer_dag.py | 2 +- providers/yandex/tests/conftest.py | 13 - providers/yandex/tests/system/__init__.py | 17 + .../system/yandex/example_yandexcloud.py | 6 +- providers/ydb/tests/conftest.py | 13 - providers/ydb/tests/integration/__init__.py | 17 + .../tests/integration/ydb}/__init__.py | 0 .../tests/integration/ydb/hooks}/__init__.py | 0 .../integration/ydb/operators}/__init__.py | 0 .../integration/ydb/operators/test_ydb.py | 0 providers/ydb/tests/system/__init__.py | 17 + providers/ydb/tests/system/ydb/example_ydb.py | 1 + providers/zendesk/tests/conftest.py | 13 - providers/zendesk/tests/system/__init__.py | 17 + pyproject.toml | 28 +- scripts/ci/docker-compose/remove-sources.yml | 4 - scripts/ci/kubernetes/k8s_requirements.txt | 1 - .../pre_commit/check_imports_in_providers.py | 4 - ...eck_providers_subpackages_all_have_init.py | 131 ++- scripts/ci/pre_commit/mypy_folder.py | 4 - .../pre_commit/update_example_dags_paths.py | 2 +- .../update_providers_build_files.py | 32 +- scripts/ci/testing/run_unit_tests.sh | 5 - scripts/in_container/run_mypy.sh | 4 +- .../run_provider_yaml_files_check.py | 7 +- tests/always/test_example_dags.py | 2 + tests/always/test_project_structure.py | 226 +++--- tests/conftest.py | 8 - tests/serialization/test_dag_serialization.py | 13 +- tests/www/views/conftest.py | 80 -- 
tests/www/views/test_views.py | 11 +- tests/www/views/test_views_dagrun.py | 113 --- tests/www/views/test_views_home.py | 194 +---- tests/www/views/test_views_tasks.py | 157 +--- tests/www/views/test_views_trigger_dag.py | 24 - tests/www/views/test_views_variable.py | 42 - tests_common/_internals/capture_warnings.py | 8 +- tests_common/_internals/forbidden_warnings.py | 4 - tests_common/pytest_plugin.py | 22 +- tests_common/test_utils/gcp_system_helpers.py | 8 +- tests_common/test_utils/www.py | 85 ++ 604 files changed, 3500 insertions(+), 4491 deletions(-) delete mode 100644 dev/example_dags/README.md delete mode 100755 dev/example_dags/update_example_dags_paths.py delete mode 100644 dev/moving_providers/README.md delete mode 100755 dev/moving_providers/move_providers.py rename providers/{src/airflow/providers => }/MANAGING_PROVIDERS_LIFECYCLE.rst (93%) rename providers/{ => airbyte/tests/system}/__init__.py (100%) rename providers/{tests => alibaba/tests/system}/__init__.py (100%) rename providers/{tests/apache => amazon/tests/provider_tests/amazon/aws/auth_manager/router}/__init__.py (100%) rename providers/{tests/integration/redis/hooks => amazon/tests/system}/__init__.py (90%) rename providers/{tests/integration/redis/operators => apache/beam/docs/_api/tests/system}/__init__.py (90%) rename providers/{tests/email => apache/beam/docs/_api/tests/system/apache}/__init__.py (90%) rename providers/{tests/email/operators => apache/beam/tests/system}/__init__.py (90%) create mode 100644 providers/apache/beam/tests/system/apache/__init__.py create mode 100644 providers/apache/cassandra/tests/integration/__init__.py create mode 100644 providers/apache/cassandra/tests/integration/apache/__init__.py rename providers/{tests/atlassian => apache/cassandra/tests/integration/apache/cassandra}/__init__.py (100%) rename providers/{tests/cncf => apache/cassandra/tests/integration/apache/cassandra/hooks}/__init__.py (100%) rename providers/{ => apache/cassandra}/tests/integration/apache/cassandra/hooks/test_cassandra.py (100%) create mode 100644 providers/apache/cassandra/tests/system/__init__.py create mode 100644 providers/apache/cassandra/tests/system/apache/__init__.py create mode 100644 providers/apache/drill/tests/integration/__init__.py create mode 100644 providers/apache/drill/tests/integration/apache/__init__.py rename providers/{tests/common => apache/drill/tests/integration/apache/drill}/__init__.py (100%) rename providers/{tests/dbt => apache/drill/tests/integration/apache/drill/hooks}/__init__.py (100%) rename providers/{ => apache/drill}/tests/integration/apache/drill/hooks/test_drill.py (100%) rename providers/{tests/integration => apache/drill/tests/integration/apache/drill/operators}/__init__.py (100%) create mode 100644 providers/apache/drill/tests/system/__init__.py create mode 100644 providers/apache/drill/tests/system/apache/__init__.py create mode 100644 providers/apache/druid/tests/system/__init__.py create mode 100644 providers/apache/druid/tests/system/apache/__init__.py create mode 100644 providers/apache/hive/tests/integration/__init__.py create mode 100644 providers/apache/hive/tests/integration/apache/__init__.py rename providers/{tests/integration/apache => apache/hive/tests/integration/apache/hive}/__init__.py (100%) rename providers/{tests/integration/apache/cassandra => apache/hive/tests/integration/apache/hive/transfers}/__init__.py (100%) rename providers/{ => apache/hive}/tests/integration/apache/hive/transfers/test_mssql_to_hive.py (100%) create mode 100644 
providers/apache/hive/tests/system/__init__.py create mode 100644 providers/apache/hive/tests/system/apache/__init__.py create mode 100644 providers/apache/iceberg/tests/system/__init__.py create mode 100644 providers/apache/iceberg/tests/system/apache/__init__.py create mode 100644 providers/apache/kafka/tests/integration/__init__.py create mode 100644 providers/apache/kafka/tests/integration/apache/__init__.py rename providers/{tests/integration/apache/cassandra/hooks => apache/kafka/tests/integration/apache/kafka}/__init__.py (100%) rename providers/{tests/integration/apache/drill => apache/kafka/tests/integration/apache/kafka/hooks}/__init__.py (100%) rename providers/{ => apache/kafka}/tests/integration/apache/kafka/hooks/test_admin_client.py (100%) rename providers/{ => apache/kafka}/tests/integration/apache/kafka/hooks/test_consumer.py (100%) rename providers/{ => apache/kafka}/tests/integration/apache/kafka/hooks/test_producer.py (100%) rename providers/{tests/integration/apache/drill/hooks => apache/kafka/tests/integration/apache/kafka/operators}/__init__.py (100%) rename providers/{ => apache/kafka}/tests/integration/apache/kafka/operators/test_consume.py (97%) rename providers/{ => apache/kafka}/tests/integration/apache/kafka/operators/test_produce.py (96%) rename providers/{tests/integration/apache/drill/operators => apache/kafka/tests/integration/apache/kafka/sensors}/__init__.py (100%) rename providers/{tests/integration/apache/hive => apache/kafka/tests/integration/apache/kafka/triggers}/__init__.py (100%) rename providers/{ => apache/kafka}/tests/integration/apache/kafka/triggers/test_await_message.py (96%) create mode 100644 providers/apache/kafka/tests/system/__init__.py create mode 100644 providers/apache/kafka/tests/system/apache/__init__.py create mode 100644 providers/apache/kylin/tests/system/__init__.py create mode 100644 providers/apache/kylin/tests/system/apache/__init__.py create mode 100644 providers/apache/livy/tests/system/__init__.py create mode 100644 providers/apache/livy/tests/system/apache/__init__.py create mode 100644 providers/apache/pig/tests/system/__init__.py create mode 100644 providers/apache/pig/tests/system/apache/__init__.py create mode 100644 providers/apache/pinot/tests/integration/__init__.py create mode 100644 providers/apache/pinot/tests/integration/apache/__init__.py create mode 100644 providers/apache/pinot/tests/integration/apache/pinot/__init__.py rename providers/{tests/integration/apache/hive/transfers => apache/pinot/tests/integration/apache/pinot/hooks}/__init__.py (100%) rename providers/{ => apache/pinot}/tests/integration/apache/pinot/hooks/test_pinot.py (100%) create mode 100644 providers/apache/pinot/tests/system/__init__.py create mode 100644 providers/apache/pinot/tests/system/apache/__init__.py create mode 100644 providers/apache/spark/tests/system/__init__.py create mode 100644 providers/apache/spark/tests/system/apache/__init__.py create mode 100644 providers/asana/tests/system/__init__.py create mode 100644 providers/cncf/kubernetes/tests/system/__init__.py create mode 100644 providers/cncf/kubernetes/tests/system/cncf/__init__.py create mode 100644 providers/cohere/tests/system/__init__.py create mode 100644 providers/common/io/tests/system/__init__.py create mode 100644 providers/common/io/tests/system/common/__init__.py create mode 100644 providers/common/sql/tests/system/__init__.py create mode 100644 providers/common/sql/tests/system/common/__init__.py create mode 100644 providers/databricks/tests/system/__init__.py 
create mode 100644 providers/dbt/cloud/tests/system/__init__.py create mode 100644 providers/dbt/cloud/tests/system/dbt/__init__.py create mode 100644 providers/dingding/tests/system/__init__.py create mode 100644 providers/docker/tests/system/__init__.py create mode 100644 providers/elasticsearch/tests/system/__init__.py create mode 100644 providers/ftp/tests/system/__init__.py create mode 100644 providers/github/tests/system/__init__.py rename providers/{ => google}/tests/deprecations_ignore.yml (92%) create mode 100644 providers/google/tests/integration/__init__.py rename providers/{tests/integration/apache/kafka => google/tests/integration/google}/__init__.py (100%) rename providers/{tests/integration/apache/kafka/hooks => google/tests/integration/google/cloud}/__init__.py (100%) rename providers/{tests/integration/apache/kafka/operators => google/tests/integration/google/cloud/transfers}/__init__.py (100%) rename providers/{ => google}/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py (100%) rename providers/{ => google}/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py (100%) rename providers/{ => google}/tests/integration/google/cloud/transfers/test_trino_to_gcs.py (100%) create mode 100644 providers/google/tests/system/__init__.py create mode 100644 providers/http/tests/system/__init__.py create mode 100644 providers/influxdb/tests/system/__init__.py create mode 100644 providers/jdbc/tests/system/__init__.py create mode 100644 providers/jenkins/tests/system/__init__.py create mode 100644 providers/microsoft/azure/tests/system/__init__.py create mode 100644 providers/microsoft/azure/tests/system/microsoft/__init__.py create mode 100644 providers/microsoft/mssql/tests/integration/__init__.py create mode 100644 providers/microsoft/mssql/tests/integration/microsoft/__init__.py rename providers/{tests/integration/apache/kafka/sensors => microsoft/mssql/tests/integration/microsoft/mssql}/__init__.py (100%) rename providers/{tests/integration/apache/kafka/triggers => microsoft/mssql/tests/integration/microsoft/mssql/hooks}/__init__.py (100%) rename providers/{ => microsoft/mssql}/tests/integration/microsoft/mssql/hooks/test_mssql.py (100%) create mode 100644 providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/test_utils.py create mode 100644 providers/microsoft/mssql/tests/system/__init__.py create mode 100644 providers/microsoft/mssql/tests/system/microsoft/__init__.py create mode 100644 providers/microsoft/winrm/tests/system/__init__.py create mode 100644 providers/microsoft/winrm/tests/system/microsoft/__init__.py create mode 100644 providers/mongo/tests/integration/__init__.py rename providers/{tests/integration/apache/pinot => mongo/tests/integration/mongo}/__init__.py (100%) rename providers/{tests/integration/apache/pinot/hooks => mongo/tests/integration/mongo/sensors}/__init__.py (100%) rename providers/{ => mongo}/tests/integration/mongo/sensors/test_mongo.py (100%) create mode 100644 providers/mysql/tests/system/__init__.py create mode 100644 providers/neo4j/tests/system/__init__.py create mode 100644 providers/odbc/tests/system/__init__.py create mode 100644 providers/openai/tests/system/__init__.py create mode 100644 providers/openlineage/tests/integration/__init__.py rename providers/{tests/integration/google => openlineage/tests/integration/openlineage}/__init__.py (100%) rename providers/{tests/integration/google/cloud => openlineage/tests/integration/openlineage/operators}/__init__.py (100%) create mode 100644 
providers/openlineage/tests/system/__init__.py create mode 100644 providers/opensearch/tests/system/__init__.py create mode 100644 providers/opsgenie/tests/system/__init__.py create mode 100644 providers/oracle/tests/system/__init__.py create mode 100644 providers/papermill/tests/system/__init__.py create mode 100644 providers/pgvector/tests/system/__init__.py create mode 100644 providers/pinecone/tests/system/__init__.py create mode 100644 providers/postgres/tests/system/__init__.py create mode 100644 providers/presto/tests/system/__init__.py delete mode 100644 providers/pyproject.toml create mode 100644 providers/qdrant/tests/integration/__init__.py rename providers/{tests/integration/google/cloud/transfers => qdrant/tests/integration/qdrant}/__init__.py (100%) rename providers/{tests/integration/microsoft => qdrant/tests/integration/qdrant/hooks}/__init__.py (100%) rename providers/{ => qdrant}/tests/integration/qdrant/hooks/test_qdrant.py (100%) rename providers/{tests/integration/microsoft/mssql => qdrant/tests/integration/qdrant/operators}/__init__.py (100%) rename providers/{ => qdrant}/tests/integration/qdrant/operators/test_qdrant_ingest.py (100%) create mode 100644 providers/qdrant/tests/system/__init__.py create mode 100644 providers/redis/tests/integration/__init__.py rename providers/{tests/integration/microsoft/mssql/hooks => redis/tests/integration/redis}/__init__.py (100%) rename providers/{tests/integration/mongo => redis/tests/integration/redis/hooks}/__init__.py (100%) rename providers/{ => redis}/tests/integration/redis/hooks/test_redis.py (100%) rename providers/{tests/integration/mongo/sensors => redis/tests/integration/redis/operators}/__init__.py (100%) rename providers/{ => redis}/tests/integration/redis/operators/test_redis_publish.py (100%) rename providers/{tests/integration/openlineage => redis/tests/integration/redis/sensors}/__init__.py (100%) rename providers/{ => redis}/tests/integration/redis/sensors/test_redis_key.py (100%) rename providers/{ => redis}/tests/integration/redis/sensors/test_redis_pub_sub.py (100%) create mode 100644 providers/redis/tests/system/__init__.py create mode 100644 providers/salesforce/tests/system/__init__.py create mode 100644 providers/samba/tests/system/__init__.py create mode 100644 providers/sftp/tests/system/__init__.py create mode 100644 providers/singularity/tests/system/__init__.py create mode 100644 providers/slack/tests/system/__init__.py create mode 100644 providers/snowflake/tests/system/__init__.py create mode 100644 providers/sqlite/tests/system/__init__.py delete mode 100644 providers/src/README.md delete mode 100644 providers/src/airflow/__init__.py delete mode 100644 providers/src/airflow/providers/.gitignore delete mode 100644 providers/src/airflow/providers/__init__.py create mode 100644 providers/tableau/tests/system/__init__.py create mode 100644 providers/telegram/tests/system/__init__.py create mode 100644 providers/teradata/tests/system/__init__.py delete mode 100644 providers/tests/conftest.py delete mode 100644 providers/tests/integration/redis/sensors/__init__.py delete mode 100644 providers/tests/integration/trino/__init__.py delete mode 100644 providers/tests/integration/trino/hooks/__init__.py delete mode 100644 providers/tests/integration/ydb/__init__.py delete mode 100644 providers/tests/integration/ydb/hooks/__init__.py delete mode 100644 providers/tests/integration/ydb/operators/__init__.py delete mode 100644 providers/tests/system/__init__.py delete mode 100644 
providers/tests/system/cncf/__init__.py delete mode 100644 providers/tests/system/conftest.py delete mode 100644 providers/tests/system/dbt/__init__.py delete mode 100644 providers/tests/system/example_empty.py delete mode 100644 providers/tests/system/microsoft/__init__.py create mode 100644 providers/trino/tests/integration/__init__.py rename providers/{tests/integration/openlineage/operators => trino/tests/integration/trino}/__init__.py (100%) rename providers/{tests/integration/qdrant => trino/tests/integration/trino/hooks}/__init__.py (100%) rename providers/{ => trino}/tests/integration/trino/hooks/test_trino.py (100%) create mode 100644 providers/trino/tests/system/__init__.py create mode 100644 providers/vertica/tests/system/__init__.py create mode 100644 providers/weaviate/tests/system/__init__.py create mode 100644 providers/yandex/tests/system/__init__.py create mode 100644 providers/ydb/tests/integration/__init__.py rename providers/{tests/integration/qdrant/hooks => ydb/tests/integration/ydb}/__init__.py (100%) rename providers/{tests/integration/qdrant/operators => ydb/tests/integration/ydb/hooks}/__init__.py (100%) rename providers/{tests/integration/redis => ydb/tests/integration/ydb/operators}/__init__.py (100%) rename providers/{ => ydb}/tests/integration/ydb/operators/test_ydb.py (100%) create mode 100644 providers/ydb/tests/system/__init__.py create mode 100644 providers/zendesk/tests/system/__init__.py diff --git a/.github/workflows/integration-system-tests.yml b/.github/workflows/integration-system-tests.yml index 7c3916d9d19c9..c884c177beae6 100644 --- a/.github/workflows/integration-system-tests.yml +++ b/.github/workflows/integration-system-tests.yml @@ -197,8 +197,7 @@ jobs: use-uv: ${{ inputs.use-uv }} - name: "System Tests" run: > - ./scripts/ci/testing/run_system_tests.sh - tests/system/example_empty.py providers/tests/system/example_empty.py + ./scripts/ci/testing/run_system_tests.sh tests/system/example_empty.py - name: "Post Tests success" uses: ./.github/actions/post_tests_success with: diff --git a/.github/workflows/special-tests.yml b/.github/workflows/special-tests.yml index 8507294e535c6..694212257a2e5 100644 --- a/.github/workflows/special-tests.yml +++ b/.github/workflows/special-tests.yml @@ -218,7 +218,7 @@ jobs: runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} test-name: "SystemTest" test-scope: "System" - test-groups: ${{ inputs.test-groups }} + test-groups: "['core']" backend: "postgres" python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2435d005fb1bb..d667342737bb0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -556,9 +556,6 @@ repos: language: pygrep entry: > (?i) - .*https://github.*[0-9]/providers/.*/tests/system/| - .*https://github.*/main/providers/.*/tests/system/| - .*https://github.*/master/providers/.*/tests/system/| .*https://github.*/main/providers/.*/src/airflow/providers/.*/example_dags/| .*https://github.*/master/providers/.*/src/airflow/providers/.*/example_dags/ pass_filenames: true @@ -674,7 +671,6 @@ repos: ^scripts/ci/docker-compose/integration-keycloak.yml$| ^scripts/ci/docker-compose/keycloak/keycloak-entrypoint.sh$| ^tests/| - ^providers/tests/| ^providers/.*/tests/| ^.pre-commit-config\.yaml$| ^.*CHANGELOG\.(rst|txt)$| @@ -920,6 +916,7 @@ repos: entry: ./scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py language: python 
require_serial: true + additional_dependencies: ['rich>=12.4.4'] - id: check-pre-commit-information-consistent name: Validate hook IDs & names and sync with docs entry: ./scripts/ci/pre_commit/check_pre_commit_hooks.py @@ -983,7 +980,6 @@ repos: entry: ./scripts/ci/pre_commit/check_system_tests.py language: python files: ^(providers/)?tests/system/.*/example_[^/]*\.py$ - exclude: ^providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries\.py$ pass_filenames: true additional_dependencies: ['rich>=12.4.4'] - id: generate-pypi-readme @@ -1386,7 +1382,7 @@ repos: stages: ['manual'] name: Run mypy for providers (manual) language: python - entry: ./scripts/ci/pre_commit/mypy_folder.py providers/src/airflow/providers all_new_providers + entry: ./scripts/ci/pre_commit/mypy_folder.py all_new_providers pass_filenames: false files: ^.*\.py$ require_serial: true diff --git a/PROVIDERS.rst b/PROVIDERS.rst index 8c0ee94f7b05a..4b0f77e5aa029 100644 --- a/PROVIDERS.rst +++ b/PROVIDERS.rst @@ -85,7 +85,7 @@ Airflow main branch to being decommissioned and removed from the main branch in Technical details on how to manage lifecycle of providers are described in the document: - `Managing provider's lifecycle `_ + `Managing provider's lifecycle `_ Accepting new community providers diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index d3aebbbce68b7..e4e875260c3b3 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -135,14 +135,6 @@ In a project like airflow it's important to have a consistent set of dependencie You can use ``uv sync`` to install dependencies from ``pyproject.toml`` file. This will install all dependencies from the ``pyproject.toml`` file in the current directory. -.. note:: - - We are currently in the process of moving providers from old structure (where all providers were under - ``providers/src`` directory in a package structure shared between Providers) to a new structure - where each provider is a separate python package in ``providers`` directory. The "old" providers support - will be removed once we move all the providers to the new structure. - - .. code:: bash uv sync @@ -188,32 +180,20 @@ run tests is to use ``pip`` to install airflow dependencies: .. code:: bash - pip install -e "./providers" pip install -e ".[devel,devel-tests,]" # for example: pip install -e ".[devel,devel-tests,google,postgres]" -This will install: - -* old structure provider sources in ``editabl`e` mode - where sources are read from ``providers`` folder. -* airflow in ``editable`` mode - where sources of Airflow are taken directly from ``airflow`` source code. - -You need to run this command in the virtualenv you want to install Airflow in - -and you need to have the virtualenv activated. - -.. note:: +This will install airflow in ``editable`` mode - where sources of +Airflow are taken directly from ``airflow`` source code. - For the providers that are already moved (i.e. have separate folder in ``providers`` directory), instead - of adding extra in airflow command you need to separately install the provider in the same venv. For example - to install ``airbyte`` provider you can run: +You need to run this command in the virtualenv you want to install Airflow in and you need to have the virtualenv activated. .. 
code:: bash - pip install -e "./providers" pip install -e ".[devel,devel-tests,]" # for example: pip install -e ".[devel,devel-tests,google,postgres]" pip install -e "./providers/airbyte[devel]" This will install: - * old structure provider sources in ``editable`` mode - where sources are read from ``providers/src`` folder * airflow in ``editable`` mode - where sources of Airflow are taken directly from ``airflow`` source code. * airbyte provider in ``editable`` mode - where sources are read from ``providers/airbyte`` folder diff --git a/contributing-docs/testing/system_tests.rst b/contributing-docs/testing/system_tests.rst index b79668f9faedb..a26d64ea67334 100644 --- a/contributing-docs/testing/system_tests.rst +++ b/contributing-docs/testing/system_tests.rst @@ -61,7 +61,7 @@ There are multiple ways of running system tests. Each system test is a self-cont other DAG. Some tests may require access to external services, enabled APIs or specific permissions. Make sure to prepare your environment correctly, depending on the system tests you want to run - some may require additional configuration which should be documented by the relevant providers in their subdirectory -``providers/tests/system//README.md``. +``tests/system//README.md``. Running as Airflow DAGs ....................... @@ -105,12 +105,6 @@ For core: breeze testing system-tests tests/system/example_empty.py -For providers: - -.. code-block:: bash - - breeze testing system-tests providers/tests/system/example_empty.py - If you need to add some initialization of environment variables when entering Breeze, you can add a ``variables.env`` file in the ``files/airflow-breeze-config/variables.env`` file. diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index bdc670f854a1c..442d22ed78d24 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -871,7 +871,7 @@ will ask you to rebuild the image if it is needed and some new dependencies shou .. code-block:: bash - breeze testing providers-tests providers/tests/http/hooks/test_http.py tests/core/test_core.py --db-reset --log-cli-level=DEBUG + breeze testing providers-tests providers/http/tests/http/hooks/test_http.py tests/core/test_core.py --db-reset --log-cli-level=DEBUG You can run the whole core test suite without adding the test target: @@ -1070,7 +1070,7 @@ directly to the container. .. code-block:: bash - pytest providers/tests//test.py + pytest providers//tests/.../test.py 4. Iterate with the tests and providers. Both providers and tests are mounted from local sources so changes you do locally in both - tests and provider sources are immediately reflected inside the @@ -1207,7 +1207,7 @@ In case you want to reproduce canary run, you need to add ``--clean-airflow-inst .. code-block:: bash - pytest providers/tests//test.py + pytest providers//tests/.../test.py 7. Iterate with the tests diff --git a/dev/README_AIRFLOW3_DEV.md b/dev/README_AIRFLOW3_DEV.md index 1c221bfa80358..72493ca4d7739 100644 --- a/dev/README_AIRFLOW3_DEV.md +++ b/dev/README_AIRFLOW3_DEV.md @@ -59,7 +59,7 @@ Make sure your code is only about Providers or Helm chart. Avoid mixing core changes into the same PR > [!NOTE] -> Please note that providers have been relocated from `airflow/providers` to `providers/src/airflow/providers`. +> Please note that providers have been relocated from `airflow/providers` to `providers//src/airflow/providers`. 
## Developing for Airflow 3 and 2.10.x / 2.11.x diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index ce4a10f35119e..1b3a0c0a5eb8e 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -149,7 +149,7 @@ def docker_compose_tests( sys.exit(return_code) -TEST_PROGRESS_REGEXP = r"tests/.*|providers/.*/tests/.*|providers/tests/.*|task_sdk/tests/.*|.*=====.*" +TEST_PROGRESS_REGEXP = r"tests/.*|providers/.*/tests/.*|task_sdk/tests/.*|.*=====.*" PERCENT_TEST_PROGRESS_REGEXP = r"^tests/.*\[[ \d%]*\].*|^\..*\[[ \d%]*\].*" diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index fd1b8d4d52245..0d39ba58c49d6 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ -37,7 +37,6 @@ from airflow_breeze.utils.path_utils import ( AIRFLOW_PROVIDERS_DIR, AIRFLOW_SOURCES_ROOT, - OLD_TESTS_PROVIDERS_ROOT, ) from airflow_breeze.utils.run_utils import run_command from airflow_breeze.utils.virtualenv_utils import create_temp_venv @@ -138,18 +137,7 @@ def test_paths(test_type: str, backend: str) -> tuple[str, str, str]: def get_ignore_switches_for_provider(provider_folders: list[str]) -> list[str]: args = [] for providers in provider_folders: - args.extend( - [ - # TODO(potiuk): remove the old ways once we migrate all providers to the new structure - f"--ignore=providers/tests/{providers}", - f"--ignore=providers/tests/system/{providers}", - f"--ignore=providers/tests/integration/{providers}", - # New structure - f"--ignore=providers/{providers}/tests/", - f"--ignore=providers/{providers}/tests/system", - f"--ignore=providers/{providers}/tests/integration", - ] - ) + args.append(f"--ignore=providers/{providers}/tests/") return args @@ -198,15 +186,25 @@ def get_excluded_provider_args(python_version: str) -> list[str]: for path in AIRFLOW_SOURCES_ROOT.glob("providers/*/*/tests/") ] ) +ALL_PROVIDER_INTEGRATION_TEST_FOLDERS: list[str] = sorted( + [ + path.relative_to(AIRFLOW_SOURCES_ROOT).as_posix() + for path in AIRFLOW_SOURCES_ROOT.glob("providers/*/tests/integration/") + ] + + [ + path.relative_to(AIRFLOW_SOURCES_ROOT).as_posix() + for path in AIRFLOW_SOURCES_ROOT.glob("providers/*/*/tests/integration/") + ] +) + TEST_GROUP_TO_TEST_FOLDERS: dict[GroupOfTests, list[str]] = { GroupOfTests.CORE: ["tests"], - # TODO(potiuk): remove me when we migrate all providers to new structure - GroupOfTests.PROVIDERS: [*ALL_NEW_PROVIDER_TEST_FOLDERS, "providers/tests"], + GroupOfTests.PROVIDERS: ALL_NEW_PROVIDER_TEST_FOLDERS, GroupOfTests.TASK_SDK: ["task_sdk/tests"], GroupOfTests.HELM: ["helm_tests"], GroupOfTests.INTEGRATION_CORE: ["tests/integration"], - GroupOfTests.INTEGRATION_PROVIDERS: ["providers/tests/integration"], + GroupOfTests.INTEGRATION_PROVIDERS: ALL_PROVIDER_INTEGRATION_TEST_FOLDERS, GroupOfTests.PYTHON_API_CLIENT: ["clients/python"], } @@ -293,41 +291,26 @@ def convert_test_type_to_pytest_args( f"[info]Removing {provider_test_to_exclude} from {providers_with_exclusions}[/]" ) providers_with_exclusions.remove(provider_test_to_exclude) - else: - # TODO(potiuk): remove me when all providers are migrated - get_console().print(f"[info]Adding {provider_test_to_exclude} to pytest ignores[/]") - providers_with_exclusions.append( - "--ignore=providers/tests/" + excluded_provider.replace(".", "/") - ) return providers_with_exclusions 
if test_type.startswith(PROVIDERS_LIST_PREFIX): provider_list = test_type[len(PROVIDERS_LIST_PREFIX) : -1].split(",") providers_to_test = [] for provider in provider_list: - # TODO(potiuk) - remove when all providers are new-style - provider_path = OLD_TESTS_PROVIDERS_ROOT.joinpath(provider.replace(".", "/")).relative_to( - AIRFLOW_SOURCES_ROOT + provider_path = ( + AIRFLOW_PROVIDERS_DIR.joinpath(provider.replace(".", "/")).relative_to( + AIRFLOW_SOURCES_ROOT + ) + / "tests" ) if provider_path.is_dir(): providers_to_test.append(provider_path.as_posix()) else: - # TODO(potiuk) - remove when all providers are new-style - old_provider_path = provider_path - provider_path = ( - AIRFLOW_PROVIDERS_DIR.joinpath(provider.replace(".", "/")).relative_to( - AIRFLOW_SOURCES_ROOT - ) - / "tests" + get_console().print( + f"[error] {provider_path} does not exist for {provider} " + "- which means that this provider has no tests. This is a bad idea. " + "Please add it (all providers should have at least a package in tests)." ) - if provider_path.is_dir(): - providers_to_test.append(provider_path.as_posix()) - else: - get_console().print( - f"[error]Neither {old_provider_path} nor {provider_path} exist for {provider} " - "- which means that provider has no tests. This is bad idea. " - "Please add it (all providers should have a package in tests)" - ) - sys.exit(1) + sys.exit(1) return providers_to_test if not test_type.startswith(PROVIDERS_PREFIX): get_console().print(f"[error]Unknown test type for {GroupOfTests.PROVIDERS}: {test_type}[/]") @@ -468,12 +451,4 @@ def convert_parallel_types_to_folders(test_group: GroupOfTests, parallel_test_ty for group_folders in TEST_GROUP_TO_TEST_FOLDERS.values(): for group_folder in group_folders: all_test_prefixes.append(group_folder) - folders = [arg for arg in args if any(arg.startswith(prefix) for prefix in all_test_prefixes)] - # remove specific provider sub-folders if "providers/tests" is already in the list - # This workarounds pytest issues where it will only run tests from specific subfolders - # if both parent and child folders are in the list - # The issue in Pytest (changed behaviour in Pytest 8.2 is tracked here - # https://github.com/pytest-dev/pytest/issues/12605 - if "providers/tests" in folders: - folders = [folder for folder in folders if not folder.startswith("providers/tests/")] - return folders + return [arg for arg in args if any(arg.startswith(prefix) for prefix in all_test_prefixes)] diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index d8d557698ae00..2420a3fb43e2f 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -189,7 +189,6 @@ def __hash__(self): r"^\.github/SECURITY\.rst$", r"^airflow/.*\.py$", r"^chart", - r"^providers/src/", r"^providers/.*/src/", r"^task_sdk/src/", r"^tests/system", @@ -209,9 +208,6 @@ def __hash__(self): FileGroupForCi.KUBERNETES_FILES: [ r"^chart", r"^kubernetes_tests", - r"^providers/src/airflow/providers/cncf/kubernetes/", - r"^providers/tests/cncf/kubernetes/", - r"^providers/tests/system/cncf/kubernetes/", r"^providers/cncf/kubernetes/", ], FileGroupForCi.ALL_PYTHON_FILES: [ @@ -273,11 +269,9 @@ def __hash__(self): FileGroupForCi.ALL_AIRFLOW_PYTHON_FILES: [ r"^.*/.*_vendor/.*", r"^airflow/migrations/.*", - r"^providers/src/airflow/providers/.*", r"^providers/.*/src/airflow/providers/.*", r"^dev/.*", r"^docs/.*", - r"^providers/tests/.*",
r"^providers/.*/tests/.*", r"^tests/dags/test_imports.py", r"^task_sdk/src/airflow/sdk/.*\.py$", @@ -287,7 +281,6 @@ def __hash__(self): ) PYTHON_OPERATOR_FILES = [ - r"^providers/src/providers/standard/operators/python.py", r"^providers/tests/standard/operators/test_python.py", ] @@ -310,8 +303,6 @@ def __hash__(self): r"^tests/operators/", ], SelectiveProvidersTestType.PROVIDERS: [ - r"^providers/src/airflow/providers/", - r"^providers/tests/", r"^providers/.*/src/airflow/providers/", r"^providers/.*/tests/", ], diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py index e289a3ba965a2..7f14572291831 100644 --- a/dev/breeze/tests/test_pytest_args_for_test_types.py +++ b/dev/breeze/tests/test_pytest_args_for_test_types.py @@ -25,15 +25,18 @@ # TODO(potiuk): rename to all_providers when we move all providers to the new structure def _all_new_providers() -> list[str]: - all_new_providers: list[str] = [] providers_root = AIRFLOW_SOURCES_ROOT / "providers" - for file in providers_root.rglob("provider.yaml"): - # TODO: remove this check when all providers are moved to the new structure - if file.is_relative_to(providers_root / "src"): - continue - provider_path = file.parent.relative_to(providers_root) - all_new_providers.append(provider_path.as_posix()) - return sorted(all_new_providers) + return sorted( + file.parent.relative_to(providers_root).as_posix() for file in providers_root.rglob("provider.yaml") + ) + + +def _find_all_integration_folders() -> list[str]: + providers_root = AIRFLOW_SOURCES_ROOT / "providers" + return sorted( + provider_posix_path.relative_to(AIRFLOW_SOURCES_ROOT).as_posix() + for provider_posix_path in providers_root.rglob("integration") + ) @pytest.mark.parametrize( @@ -55,7 +58,21 @@ def _all_new_providers() -> list[str]: ( GroupOfTests.INTEGRATION_PROVIDERS, "All", - ["providers/tests/integration"], + [ + "providers/apache/cassandra/tests/integration", + "providers/apache/drill/tests/integration", + "providers/apache/hive/tests/integration", + "providers/apache/kafka/tests/integration", + "providers/apache/pinot/tests/integration", + "providers/google/tests/integration", + "providers/microsoft/mssql/tests/integration", + "providers/mongo/tests/integration", + "providers/openlineage/tests/integration", + "providers/qdrant/tests/integration", + "providers/redis/tests/integration", + "providers/trino/tests/integration", + "providers/ydb/tests/integration", + ], ), ( GroupOfTests.INTEGRATION_CORE, @@ -82,7 +99,6 @@ def _all_new_providers() -> list[str]: "Providers", [ *[f"providers/{provider}/tests" for provider in _all_new_providers()], - "providers/tests", ], ), ( @@ -113,7 +129,6 @@ def _all_new_providers() -> list[str]: for provider in _all_new_providers() if provider not in ["amazon", "google", "microsoft/azure"] ], - "providers/tests", ], ), ( @@ -121,7 +136,6 @@ def _all_new_providers() -> list[str]: "Providers[-edge]", [ *[f"providers/{provider}/tests" for provider in _all_new_providers() if provider != "edge"], - "providers/tests", ], ), ( @@ -134,7 +148,6 @@ def _all_new_providers() -> list[str]: "All-Quarantined", [ *[f"providers/{provider}/tests" for provider in _all_new_providers()], - "providers/tests", "-m", "quarantined", "--include-quarantined", @@ -237,7 +250,6 @@ def test_pytest_args_for_missing_provider(): "Providers", [ *[f"providers/{provider}/tests" for provider in _all_new_providers()], - "providers/tests", ], ), ( @@ -264,7 +276,6 @@ def test_pytest_args_for_missing_provider(): for 
provider in _all_new_providers() if provider not in ["amazon", "google"] ], - "providers/tests", ], ), ( @@ -276,16 +287,13 @@ def test_pytest_args_for_missing_provider(): for provider in _all_new_providers() if provider not in ["amazon", "google"] ], - "providers/tests", *["providers/amazon/tests", "providers/google/tests"], ], ), ( GroupOfTests.INTEGRATION_PROVIDERS, "All", - [ - "providers/tests/integration", - ], + _find_all_integration_folders(), ), ( GroupOfTests.HELM, diff --git a/dev/breeze/tests/test_run_test_args.py b/dev/breeze/tests/test_run_test_args.py index 42df2f83958f2..1c409670f3d52 100644 --- a/dev/breeze/tests/test_run_test_args.py +++ b/dev/breeze/tests/test_run_test_args.py @@ -16,7 +16,6 @@ # under the License. from __future__ import annotations -import re from unittest.mock import patch import pytest @@ -76,7 +75,7 @@ def test_irregular_provider_with_extra_ignore_should_be_valid_cmd(mock_run_comma _run_test( shell_params=ShellParams(test_group=GroupOfTests.PROVIDERS, test_type="Providers"), - extra_pytest_args=(f"--ignore=providers/tests/{fake_provider_name}",), + extra_pytest_args=(), python_version="3.9", output=None, test_timeout=60, @@ -87,19 +86,7 @@ def test_irregular_provider_with_extra_ignore_should_be_valid_cmd(mock_run_comma # positional arg of the command call run_cmd_call = mock_run_command.call_args_list[1] arg_str = " ".join(run_cmd_call.args[0]) - - # The command pattern we look for is " \ - # <*other args we don't care about*> --ignore providers/tests/ \ - # --ignore providers/tests/system/ --ignore providers/tests/integration/" - # (the container id is simply to anchor the pattern so we know where we are starting; _run_tests should - # be refactored to make arg testing easier but until then we have to regex-test the entire command string - match_pattern = re.compile( - f".* airflow providers/.*/tests.*providers/tests .* --ignore=providers/tests/{fake_provider_name} " - f"--ignore=providers/tests/system/{fake_provider_name} " - f"--ignore=providers/tests/integration/{fake_provider_name}" - ) - - assert match_pattern.search(arg_str), arg_str + assert f"--ignore=providers/{fake_provider_name}/tests/ " in arg_str def test_test_is_skipped_if_all_are_ignored(mock_run_command): diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 9f4c46cfc1c3b..7a5f68aac5bbd 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -1311,7 +1311,7 @@ def test_full_test_needed_when_scripts_changes(files: tuple[str, ...], expected_ ), ( pytest.param( - ("INTHEWILD.md", "providers/tests/asana.py"), + ("INTHEWILD.md", "providers/asana/tests/asana.py"), ("full tests needed",), "v2-7-stable", { @@ -2462,16 +2462,6 @@ def test_provider_compatibility_checks(labels: tuple[str, ...], expected_outputs (), id="Airflow mypy checks on airflow files with model changes.", ), - pytest.param( - ("providers/src/airflow/providers/a_file.py",), - { - "needs-mypy": "true", - "mypy-checks": "['mypy-providers']", - }, - "main", - (), - id="Airflow mypy checks on provider files", - ), pytest.param( ("task_sdk/src/airflow/sdk/a_file.py",), { diff --git a/dev/example_dags/README.md b/dev/example_dags/README.md deleted file mode 100644 index aef1dddcaa0b6..0000000000000 --- a/dev/example_dags/README.md +++ /dev/null @@ -1,34 +0,0 @@ - - -Run this script in a `docs-archive` folder of checked out `airflow-site` repo -to refresh links to example dags to the right versions. 
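In essence, the removed script matched documentation pages in each versioned provider folder against a URL regex and pinned example-dag links to the provider's release tag. A simplified sketch of that core rewrite, condensed from the deleted source below (URL verification and the system-tests variant are omitted here):

```python
import re

# Condensed from the deleted update_example_dags_paths.py below.
EXAMPLE_DAGS_URL_MATCHER = re.compile(
    r"^(.*)(https://github.com/apache/airflow/tree/(.*)"
    r"/providers/src/airflow/providers/(.*)/example_dags)(/?\".*)$"
)

def pin_example_dags_link(line: str, provider: str, version: str) -> str:
    """Rewrite a branch-based example-dags link to the providers-{id}/{version} tag."""
    match = EXAMPLE_DAGS_URL_MATCHER.match(line)
    if not match:
        return line
    pinned = (
        f"https://github.com/apache/airflow/tree/providers-{provider}/{version}"
        f"/providers/src/airflow/providers/{match.group(4)}/example_dags"
    )
    return f"{match.group(1)}{pinned}{match.group(5)}"
```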
- -Instructions: - -```shell script -git clone https://github.com/apache/airflow-site.git airflow-site -cd airflow-site -export AIRFLOW_SITE_DIRECTORY="$(pwd)" - -cd - -cp update_example_dags_paths.py $AIRFLOW_SITE_DIRECTORY/docs-archive -cd $AIRFLOW_SITE_DIRECTORY/docs-archive -python update_example_dags_paths.py -``` diff --git a/dev/example_dags/update_example_dags_paths.py b/dev/example_dags/update_example_dags_paths.py deleted file mode 100755 index ac8cf4a0feec4..0000000000000 --- a/dev/example_dags/update_example_dags_paths.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import os -import re -from pathlib import Path - -import requests -from rich.console import Console -from rich.progress import Progress - -if __name__ not in ("__main__", "__mp_main__"): - raise SystemExit( - "This file is intended to be executed as an executable program. You cannot use it as a module." - f"To run this script, run the ./{__file__} command [FILE] ..." 
- ) - - -console = Console(color_system="standard", width=200) - -AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve() - - -EXAMPLE_DAGS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/providers/src/airflow/providers/(.*)/example_dags)(/?\".*)$" -) -SYSTEM_TESTS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/providers/tests/system/(.*))(/?\".*)$" -) - - -def check_if_url_exists(url: str) -> bool: # type: ignore[return] - return True # uncomment to check URLs - response = requests.head(url, allow_redirects=True) - if response.status_code == 200: - return True - if response.status_code == 404: - return False - console.print(f"[red]Unexpected error received: {response.status_code}[/]") - response.raise_for_status() - - -def replace_match(file: str, line: str, provider: str, version: str) -> str | None: - for index, matcher in enumerate([EXAMPLE_DAGS_URL_MATCHER, SYSTEM_TESTS_URL_MATCHER]): - match = matcher.match(line) - if match: - url_path_to_dir = match.group(4) - branch = match.group(3) - if branch.startswith("providers-") and branch.endswith(f"/{version}"): - console.print(f"[green]Already corrected[/]: {provider}:{version}") - continue - system_tests_url = ( - f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/providers/tests/system/{url_path_to_dir}" - ) - example_dags_url = ( - f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/providers/src/airflow/providers/{url_path_to_dir}/example_dags" - ) - if check_if_url_exists(system_tests_url) and index == 1: - new_line = re.sub(matcher, r"\1" + system_tests_url + r"\5", line) - elif check_if_url_exists(example_dags_url) and index == 0: - new_line = re.sub(matcher, r"\1" + example_dags_url + r"\5", line) - else: - console.print( - f"[yellow] Neither example dags nor system tests folder" - f" exists for {provider}:{version} -> skipping:[/]" - ) - console.print(line) - return line - if line != new_line: - console.print(f"[yellow] Replacing in {file}[/]\n{line.strip()}\n{new_line.strip()}") - return new_line - return line - - -def find_matches(_file: Path, provider: str, version: str): - lines = _file.read_text().splitlines(keepends=True) - new_lines = [] - for line in lines: - new_line = replace_match(str(_file), line, provider, version) - if new_line: - new_lines.append(new_line) - _file.write_text("".join(new_lines)) - - -if __name__ == "__main__": - curdir: Path = Path(os.curdir).resolve() - dirs: list[Path] = [p for p in curdir.iterdir() if p.is_dir()] - with Progress(console=console) as progress: - task = progress.add_task(f"Updating {len(dirs)}", total=len(dirs)) - for directory in dirs: - if directory.name.startswith("apache-airflow-providers-"): - provider = directory.name[len("apache-airflow-providers-") :] - console.print(f"[bright_blue] Processing {directory}") - for version_dir in directory.iterdir(): - if version_dir.is_dir(): - console.print(version_dir.name) - for candidate_file in version_dir.rglob("*.html"): - if candidate_file.exists(): - find_matches(candidate_file, provider, version_dir.name) - progress.advance(task) diff --git a/dev/moving_providers/README.md b/dev/moving_providers/README.md deleted file mode 100644 index c83b8acc5ad6e..0000000000000 --- a/dev/moving_providers/README.md +++ /dev/null @@ -1,120 +0,0 @@ - - - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Moving providers to new structure](#moving-providers-to-new-structure) - - [How to use 
the script](#how-to-use-the-script) - - [Options](#options) -- [What happens under the hood](#what-happens-under-the-hood) -- [What to do next](#what-to-do-next) - - - -# Moving providers to new structure - -We are moving providers to a new structure, where each provider has a separate sub-project in -"providers" sub-folder. - -This means that we need to migrate some 90+ providers to the new structure. This is a big task and while we -could do it in one huge PR, it would be disruptive and likely take some time to review and fix some individual -edge-cases - even if we have automated most of the work. - -This directory contains a script that contributors can use to move a provider (or a few providers to the -new structure as a starting point for their PR. Most of the work is automated, but there will be likely -some manual adjustments needed in more complex cases. - -## How to use the script - -The script follows https://peps.python.org/pep-0723/ and uses inlined dependencies - so it can be run as-is -by modern tools without creating dedicated virtualenv - the virtualenv with dependencies is -created on-the-fly by PEP 723 compatible tools. - -For example this one will make a dry-run of moving Alibaba provider: - -```shell -uv run dev/moving_providers/move_providers.py alibaba -``` - -And this one will perform update and move the Alibaba provider: - - -```shell -uv run dev/moving_providers/move_providers.py alibaba --perform-update -``` - -## Options - - -> [!NOTE] -> You can see all the options by running the script with `--help` option: -> -> ```shell -> uv run dev/moving_providers/move_providers.py --help -> ``` - -By default the script runs in dry run mode, which means it will not make any changes to the file system, -but will print what it would do. To actually move the files, you need to pass `--perform-update` flag. - -```shell -uv run dev/moving_providers/move_providers.py alibaba --perform-update -``` - -You can specify multiple providers to move in one go: - -```shell -uv run dev/moving_providers/move_providers.py alibaba amazon microsoft.azure -``` - -You can specify `--verbose` option to see more details about what the script is doing: - -```shell -uv run dev/moving_providers/move_providers.py alibaba --verbose -``` - -You can also specify `--quiet` option to see less output: - -```shell -uv run dev/moving_providers/move_providers.py alibaba --quiet -``` - -# What happens under the hood - -When you run the script with `--perform-update` flag, you will see the diff of the changes -that the script made, and you will be able to scroll through it (with your configured editor) -to verify that it looks good. - -The script will: - -* move the provider to the new structure and apply fixes -* build CI image to add the new provider in the image packages -* run static checks to verify that the moved provider code is good and apply auto-fixes in some cases - -# What to do next - -After all that you need to fix all potential static check problems, run all the tests for the provider and -fix any issues that might happen: - -1) Fix all the static check errors, add them to git -2) run `breeze testing providers-tests --test-type 'Providers[LIST_OF_PROVIDER_IDS_MOVED]'` and fix all tests. -3) Add changes to git, create branch, commit the changes and create a PR! - -Good luck! 
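The per-provider `conftest.py` diffs later in this patch show the other side of this cleanup: with the deprecations-ignore bookkeeping moved into the shared `tests_common` pytest plugin, every provider's conftest shrinks to a single plugin registration. The resulting file looks the same for every provider (the path in the comment is illustrative):

```python
# providers/<provider>/tests/conftest.py -- the per-provider
# deprecations_ignore.yml lookup now happens inside tests_common.pytest_plugin.
from __future__ import annotations

pytest_plugins = "tests_common.pytest_plugin"
```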
diff --git a/dev/moving_providers/move_providers.py b/dev/moving_providers/move_providers.py deleted file mode 100755 index 89efdd28321a6..0000000000000 --- a/dev/moving_providers/move_providers.py +++ /dev/null @@ -1,757 +0,0 @@ -#!/usr/bin/env python -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "click>=8.1.8", -# "rich>=13.6.0", -# "rich-click>=1.7.1", -# "pyyaml>=6.0.1", -# ] -# /// -from __future__ import annotations - -import difflib -import shutil -import subprocess -import sys -from functools import cache -from pathlib import Path - -import rich_click as click -from rich.console import Console -from rich.syntax import Syntax - -ROOT_PROJECT_DIR_PATH = Path(__file__).parent.parent.parent -PROVIDERS_DIR_PATH = ROOT_PROJECT_DIR_PATH / "providers" -OLD_PROVIDERS_SRC_DIR_PATH = PROVIDERS_DIR_PATH / "src" -OLD_PROVIDERS_AIRFLOW_PROVIDERS_SRC_PACKAGE_PATH = OLD_PROVIDERS_SRC_DIR_PATH / "airflow" / "providers" -OLD_PROVIDERS_TEST_DIR_PATH = ROOT_PROJECT_DIR_PATH / "providers" / "tests" -OLD_PROVIDERS_SYSTEM_TEST_DIR_PATH = OLD_PROVIDERS_TEST_DIR_PATH / "system" -DOCS_DIR_PATH = ROOT_PROJECT_DIR_PATH / "docs" - - -@cache -def _get_all_old_providers() -> list[str]: - return sorted( - [ - ".".join( - provider_yaml_path.parent.relative_to(OLD_PROVIDERS_AIRFLOW_PROVIDERS_SRC_PACKAGE_PATH).parts - ) - for provider_yaml_path in OLD_PROVIDERS_AIRFLOW_PROVIDERS_SRC_PACKAGE_PATH.rglob("provider.yaml") - ] - ) - - -def _get_provider_distribution_name(provider_id: str) -> str: - return f"apache-airflow-providers-{provider_id.replace('.', '-')}" - - -def _get_provider_only_path(provider_id: str) -> str: - return provider_id.replace(".", "/") - - -CONTENT_OVERRIDE = ["This content will be overridden by pre-commit hook"] - -console = Console(color_system="standard") - -is_verbose = False -is_quiet = False -is_dry_run = False - - -def _do_stuff( - *, - syntax: str | None, - from_path: Path | None = None, - to_path: Path | None = None, - from_content: list[str] | None = None, - updated_content: list[str] | None = None, - delete_from: bool = False, - remove_empty_parent_dir: bool = False, -): - if not to_path: - # in place update - to_path = from_path - updated_str = "" - if updated_content: - updated_str = "\n".join(updated_content) + "\n" - if is_verbose and syntax: - console.print(Syntax(updated_str, syntax, theme="ansi_dark")) - console.rule() - if not is_quiet: - if updated_content and from_content and from_path and to_path: - diff = difflib.unified_diff( - from_content, updated_content, fromfile=from_path.as_posix(), tofile=to_path.as_posix() - ) - if syntax: - console.print(Syntax("\n".join(diff), "diff", theme="ansi_dark")) - console.print() - elif updated_content and not from_content and to_path: 
- console.print(f"\n[yellow]Creating {to_path}:\n") - if syntax: - console.print(Syntax(updated_str, syntax, theme="ansi_dark")) - elif not from_content and not updated_content and from_path and to_path and delete_from: - console.print(f"\n[yellow]Moving[/] {from_path} -> {to_path}\n") - if ( - remove_empty_parent_dir - and from_path.exists() - and len([path for path in from_path.parent.iterdir()]) == 1 - ): - console.print(f"\n[yellow]Removing also empty parent dir {from_path.parent}\n") - elif not from_content and not updated_content and from_path and to_path and not delete_from: - console.print(f"\n[yellow]Copying[/] {from_path} -> {to_path}\n") - elif delete_from and from_path: - console.print(f"\n[yellow]Deleting {from_path}\n") - if remove_empty_parent_dir and len([path for path in from_path.parent.iterdir()]) == 1: - console.print(f"\n[yellow]Removing also empty parent dir {from_path.parent}\n") - if not is_dry_run: - if updated_content and to_path: - to_path.parent.mkdir(parents=True, exist_ok=True) - to_path.write_text(updated_str) - console.print(f"\n[yellow]Written {to_path}\n") - elif not from_content and not updated_content and from_path and to_path: - if delete_from: - to_path.parent.mkdir(parents=True, exist_ok=True) - if from_path.is_dir() and to_path.exists(): - shutil.rmtree(to_path) - if from_path.exists(): - shutil.move(from_path, to_path) - console.print(f"\n[yellow]Moved {from_path} -> {to_path}\n") - if ( - remove_empty_parent_dir - and from_path.exists() - and len([path for path in from_path.parent.iterdir()]) == 0 - ): - console.print(f"\n[yellow]Removed also empty parent dir {from_path.parent}\n") - from_path.parent.rmdir() - return - else: - to_path.parent.mkdir(parents=True, exist_ok=True) - if from_path.is_dir(): - shutil.rmtree(to_path) - shutil.copytree(from_path, to_path) - else: - to_path.write_text(from_path.read_text()) - console.print(f"\n[yellow]Copied {from_path} -> {to_path}\n") - return - if delete_from and from_path: - from_path.unlink() - console.print(f"\n[yellow]Deleted {from_path}\n") - - -def _replace_string(path: Path, old: str, new: str): - content = path.read_text() - count_occurrences = content.count(old) - if count_occurrences: - new_content = content.replace(old, new) - console.print( - f"\n[bright_blue]Replacing `{old}` with `{new}` in `{path}`: " - f"{count_occurrences} occurrences found\n" - ) - if not is_dry_run: - path.write_text(new_content) - - -@click.command() -@click.argument("provider_ids", type=click.Choice(_get_all_old_providers()), required=True, nargs=-1) -@click.option( - "--perform-update", - help="By default the command performs dry-run, explaining what will happen. With `--perform-update` " - "it will actually do the job.", - is_flag=True, -) -@click.option( - "--verbose", - help="Whether to show complete content of generated files. 
(mutually exclusive with --quiet).", - is_flag=True, -) -@click.option( - "--skip-build-file-generation", - help="When set, the step to generate build files is skipped.", - is_flag=True, -) -@click.option( - "--quiet", - help="Whether to be quite - only show providers updated (mutually exclusive with --verbose).", - is_flag=True, -) -def move_providers( - provider_ids: tuple[str, ...], - perform_update: bool, - skip_build_file_generation: bool, - verbose: bool, - quiet: bool, -): - if quiet and verbose: - console.print("\n[red]Cannot use --quiet and --verbose at the same time\n") - sys.exit(1) - global is_quiet, is_verbose, is_dry_run - is_quiet = quiet - is_verbose = verbose - is_dry_run = not perform_update - if is_dry_run: - console.print( - "\n[yellow]Running in dry-run mode, no changes will be made.[/]\n\n" - "Add `--perform-update` flag to actually make the change.\n" - ) - - console.print("\n[blue]Moving providers:[/]\n") - console.print("* " + "\n *".join(provider_ids)) - console.print() - - for provider_id in provider_ids: - console.rule(f"\n[magenta]Moving provider: {provider_id}[/]\n", align="left") - move_provider(provider_id) - console.rule() - console.print() - - count_providers = len(_get_all_old_providers()) - if perform_update: - subprocess.run("git add .", shell=True, check=True) - if not skip_build_file_generation: - subprocess.run("pre-commit run update-providers-build-files", shell=True, check=False) - subprocess.run("git add . ", shell=True, check=True) - subprocess.run("git diff HEAD", shell=True, check=False) - subprocess.run("uv sync --all-extras", shell=True, check=False) - subprocess.run("breeze static-checks --force-build", shell=True, check=False) - subprocess.run("git add . ", shell=True, check=True) - console.print("\n[bright_green]First part of migration is complete[/].\n") - console.print( - f"\nAfter you create PR and it will be merged there will be {count_providers - len(provider_ids)} providers " - f"left in the old location.\n" - ) - console.print("[yellow]Next steps:[/]\n") - console.print(" 1) Fix all the static check errors, add them to git") - console.print( - f" 2) run `breeze testing providers-tests --test-type " - rf"'Providers\[{','.join(provider_ids)}]'` and fix all tests." 
- ) - console.print(" 3) Add changes to git, create branch, commit the changes and create a PR!") - console.print("\nGood luck!\n") - else: - console.print("\n[yellow]Dry-run mode, no changes were made.\n") - console.print(f"\nThere are currently {count_providers} providers left in the old structure.\n") - - -def fix_boring_cyborg(provider_id: str): - boring_cyborg_file_path = ROOT_PROJECT_DIR_PATH / ".github" / "boring-cyborg.yml" - console.rule(f"Updating {boring_cyborg_file_path}", style="bright_blue") - original_content = boring_cyborg_file_path.read_text().splitlines() - updated_content = [] - in_provider = False - for line in original_content: - if not in_provider: - updated_content.append(line) - if line.strip() == f"provider:{provider_id.replace('.', '-')}:": - in_provider = True - updated_content.append(f" - providers/{provider_id.replace('.', '/')}/**") - updated_content.append("") - if in_provider and line.strip() == "": - in_provider = False - _do_stuff( - syntax="yaml", - from_path=boring_cyborg_file_path, - from_content=original_content, - updated_content=updated_content, - ) - provider_only_path = _get_provider_only_path(provider_id) - _replace_string( - boring_cyborg_file_path, - f"providers/src/airflow/providers/{provider_only_path}", - f"providers/{provider_only_path}/src/airflow/providers/{provider_only_path}", - ) - _replace_string( - boring_cyborg_file_path, - f"providers/tests/{provider_only_path}", - f"providers/{provider_only_path}/tests/provider_tests/{provider_only_path}", - ) - console.rule(style="bright_blue") - - -def add_docs_to_gitignore(provider_id: str): - gitignore_path = DOCS_DIR_PATH / ".gitignore" - console.rule(f"Updating {gitignore_path}", style="bright_blue") - original_content = gitignore_path.read_text().splitlines() - provider_line = f"apache-airflow-providers-{provider_id.replace('.', '-')}" - if provider_line in original_content: - console.print(f"\n[yellow]Provider {provider_id} already in .gitignore\n") - return - updated_content: list[str] = [] - updated = False - for line in original_content: - if not line.startswith("#") and line > provider_line and not updated: - updated_content.append(provider_line) - updated = True - updated_content.append(line) - if not updated: - updated_content.append(provider_line) - _do_stuff( - syntax="gitignore", - from_path=gitignore_path, - from_content=original_content, - updated_content=updated_content, - ) - console.rule(style="bright_blue") - - -def remove_changelog(provider_id: str): - changelog_path = DOCS_DIR_PATH / _get_provider_distribution_name(provider_id) / "changelog.rst" - console.rule(f"Deleting {changelog_path}", style="bright_blue") - _do_stuff(syntax="gitignore", from_path=changelog_path, delete_from=True) - console.rule(style="bright_blue") - - -def create_readme(provider_id: str): - readme_path = PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "README.rst" - console.rule(f"Creating {readme_path}", style="bright_blue") - _do_stuff(syntax="rst", to_path=readme_path, updated_content=CONTENT_OVERRIDE) - console.rule(style="bright_blue") - - -def move_docs(provider_id: str): - source_doc_dir = DOCS_DIR_PATH / _get_provider_distribution_name(provider_id) - dest_doc_dir = PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "docs" - console.rule(f"Moving docs to {dest_doc_dir}", style="bright_blue") - _do_stuff(syntax="rst", from_path=source_doc_dir, to_path=dest_doc_dir, delete_from=True) - provider_package_source_dir = OLD_PROVIDERS_AIRFLOW_PROVIDERS_SRC_PACKAGE_PATH / 
_get_provider_only_path( - provider_id - ) - _do_stuff( - syntax="rst", - from_path=provider_package_source_dir / "CHANGELOG.rst", - to_path=dest_doc_dir / "changelog.rst", - delete_from=True, - ) - if (provider_package_source_dir / ".latest-doc-only-change.txt").exists(): - _do_stuff( - syntax="txt", - from_path=provider_package_source_dir / ".latest-doc-only-change.txt", - to_path=dest_doc_dir / ".latest-doc-only-change.txt", - delete_from=True, - ) - console.rule(style="bright_blue") - - -def move_provider_yaml(provider_id: str) -> tuple[list[str], list[str], list[str]]: - source_provider_yaml_path = ( - OLD_PROVIDERS_AIRFLOW_PROVIDERS_SRC_PACKAGE_PATH - / _get_provider_only_path(provider_id) - / "provider.yaml" - ) - target_provider_yaml_path = PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "provider.yaml" - console.rule(f"Moving {source_provider_yaml_path} to {target_provider_yaml_path}", style="bright_blue") - original_content = source_provider_yaml_path.read_text().splitlines() - in_dependencies = False - in_optional_dependencies = False - in_devel_dependencies = False - updated_content = [] - - dependencies = [] - optional_dependencies = [] - devel_dependencies = [] - already_moved_logos = set() - for line in original_content: - if line.startswith(" logo: "): - logo_path = line[len(" logo: ") :] - logo_name = logo_path.split("/")[-1] - if logo_path in already_moved_logos: - continue - new_logo_dir = ( - PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "docs" / "integration-logos" - ) - new_logo_path = new_logo_dir / logo_name - if logo_name in already_moved_logos: - continue - already_moved_logos.add(logo_name) - _do_stuff( - syntax="none", - from_path=DOCS_DIR_PATH / Path(logo_path[1:]), - to_path=new_logo_path, - delete_from=True, - remove_empty_parent_dir=True, - ) - line = f" logo: /docs/integration-logos/{logo_name}" - already_moved_logos.add(logo_path) - if line == "dependencies:" and not in_dependencies: - in_dependencies = True - continue - if in_dependencies: - if not line: - continue - if line.startswith(" -"): - dependencies.append(f' "{line[len(" - ") :]}",') - elif line.strip().startswith("#"): - dependencies.append(f" {line.strip()}") - else: - in_dependencies = False - if line == "devel-dependencies:" and not in_devel_dependencies: - in_devel_dependencies = True - continue - if in_devel_dependencies: - if not line: - continue - if line.startswith(" - "): - devel_dependencies.append(f' "{line[len(" - ") :]}",') - elif line.strip().startswith("#"): - devel_dependencies.append(f" {line.strip()}") - else: - in_devel_dependencies = False - if line == "additional-extras:" and not in_optional_dependencies: - in_optional_dependencies = True - continue - if in_optional_dependencies: - if not line: - continue - if line.startswith(" "): - optional_dependencies.append(line) - else: - in_optional_dependencies = False - if not in_dependencies and not in_optional_dependencies and not in_devel_dependencies: - updated_content.append(line) - _do_stuff( - syntax="yml", - from_path=source_provider_yaml_path, - to_path=target_provider_yaml_path, - from_content=original_content, - updated_content=updated_content, - delete_from=True, - ) - if optional_dependencies: - in_dependency = False - optional_dependencies_processed = [] - for line in optional_dependencies: - if line.startswith(" - name: "): - name = line[len(" - name: ") :] - if in_dependency: - optional_dependencies_processed.append("]") - optional_dependencies_processed.append(f'"{name}" = [') - 
in_dependency = True - elif line.startswith(" -"): - dependency = line[len(" - ") :] - optional_dependencies_processed.append(f' "{dependency}",') - elif line.startswith(" #"): - optional_dependencies_processed.append(f" {line.strip()}") - elif line.startswith(" #"): - if in_dependency: - optional_dependencies_processed.append("]") - in_dependency = False - optional_dependencies_processed.append(f"{line.strip()}") - optional_dependencies_processed.append("]") - else: - optional_dependencies_processed = [] - console.rule(style="bright_blue") - return ( - dependencies, - devel_dependencies, - optional_dependencies_processed, - ) - - -def create_pyproject_toml( - provider_id: str, - dependencies: list[str], - devel_dependencies: list[str], - optional_dependencies: list[str], -): - dependencies_str = "\n".join(dependencies) - devel_dependencies_str = "\n".join(devel_dependencies) - optional_dependencies_str = "\n".join(optional_dependencies) - start_pyproject_toml = f""" -# Content of this file will be replaced by pre-commit hook -[build-system] -requires = ["flit_core==3.10.1"] -build-backend = "flit_core.buildapi" - -[project] -name = "apache-airflow-providers-SOME_PROVIDER" -version = "VERSION" -description = "Provider package PROVIDER for Apache Airflow" -readme = "README.rst" - -dependencies = [ -{dependencies_str} -] -""" - optional_dependencies_toml = f""" -[project.optional-dependencies] -{optional_dependencies_str} -""" - devel_dependencies_toml = f""" -[dependency-groups] -dev = [ -{devel_dependencies_str} -] - -[project.urls] -""" - pyproject_toml_path = PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "pyproject.toml" - console.rule( - f"Creating basic pyproject.toml for {provider_id} in {pyproject_toml_path}", style="bright_blue" - ) - - pyproject_toml_content = start_pyproject_toml - if optional_dependencies: - pyproject_toml_content += optional_dependencies_toml - if devel_dependencies: - pyproject_toml_content += devel_dependencies_toml - - _do_stuff(syntax="toml", to_path=pyproject_toml_path, updated_content=pyproject_toml_content.splitlines()) - console.rule(style="bright_blue") - - -def move_sources(provider_id: str): - source_provider_dir = OLD_PROVIDERS_AIRFLOW_PROVIDERS_SRC_PACKAGE_PATH / _get_provider_only_path( - provider_id - ) - dest_provider_dir = ( - PROVIDERS_DIR_PATH - / _get_provider_only_path(provider_id) - / "src" - / "airflow" - / "providers" - / _get_provider_only_path(provider_id) - ) - console.rule(f"Moving sources from {source_provider_dir} to {dest_provider_dir}", style="bright_blue") - _do_stuff(syntax="bash", from_path=source_provider_dir, to_path=dest_provider_dir, delete_from=True) - console.rule(style="bright_blue") - - -def move_tests(provider_id: str): - source_test_dir = OLD_PROVIDERS_TEST_DIR_PATH / _get_provider_only_path(provider_id) - airflow_tests_dir = ROOT_PROJECT_DIR_PATH / "tests" - root_dest_test_dir = PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "tests" - dest_test_dir = root_dest_test_dir / "provider_tests" / _get_provider_only_path(provider_id) - console.rule(f"Moving tests from {source_test_dir} to {dest_test_dir}", style="bright_blue") - - for test_file_path in source_test_dir.rglob("*.py"): - _replace_string( - test_file_path, f"from providers.tests.{provider_id}", f"from provider_tests.{provider_id}" - ) - for test_file_path in airflow_tests_dir.rglob("*.py"): - _replace_string( - test_file_path, f"from providers.tests.{provider_id}", f"from provider_tests.{provider_id}" - ) - _do_stuff(syntax="bash", 
from_path=source_test_dir, to_path=dest_test_dir, delete_from=True) - - conftest_py_content = """ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import pathlib - -import pytest - -pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) -"""[1:] - - conftest_py_path = root_dest_test_dir / "conftest.py" - _do_stuff(syntax="python", to_path=conftest_py_path, updated_content=conftest_py_content.splitlines()) - init_content = """ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore -"""[1:] - for parent in dest_test_dir.parents: - if parent.name == "tests": - break - init_file = parent / "__init__.py" - if not init_file.exists(): - _do_stuff(syntax="python", to_path=init_file, updated_content=init_content.splitlines()) - - console.rule(style="bright_blue") - - -def move_system_tests(provider_id: str) -> bool: - source_system_test_dir = OLD_PROVIDERS_SYSTEM_TEST_DIR_PATH / _get_provider_only_path(provider_id) - if source_system_test_dir.exists(): - dest_system_test_dir = ( - PROVIDERS_DIR_PATH - / _get_provider_only_path(provider_id) - / "tests" - / "system" - / _get_provider_only_path(provider_id) - ) - console.rule( - f"Moving system tests from {source_system_test_dir} to {dest_system_test_dir}", - style="bright_blue", - ) - _do_stuff( - syntax="bash", from_path=source_system_test_dir, to_path=dest_system_test_dir, delete_from=True - ) - console.rule(style="bright_blue") - return True - return False - - -def replace_system_test_example_includes(provider_id: str): - target_doc_providers_dir = PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "docs" - console.rule(f"Replacing system test example includes in {target_doc_providers_dir}", style="bright_blue") - for rst_file in target_doc_providers_dir.rglob("*.rst"): - provider_only_path = _get_provider_only_path(provider_id) - _replace_string( - rst_file, - f"../providers/tests/system/{provider_only_path}/", - f"../providers/{provider_only_path}/tests/system/{provider_only_path}/", - ) - _replace_string( - rst_file, - f"|version|/providers/tests/system/{provider_only_path}/", - f"|version|/providers/{provider_only_path}/tests/system/{provider_only_path}/", - ) - console.rule(style="bright_blue") - - -def update_airflow_pyproject_toml(provider_id): - pyproject_toml_path = ROOT_PROJECT_DIR_PATH / "pyproject.toml" - console.rule(f"Updating {pyproject_toml_path}", style="bright_blue") - content = pyproject_toml_path.read_text().splitlines() - updated_content: list[str] = [] - distribution_name = _get_provider_distribution_name(provider_id) - only_provider_path = _get_provider_only_path(provider_id) - in_dependency_groups = False - in_tool_uv_sources = False - in_tool_uv_workspace = False - - dependency_line_to_add = f' "{distribution_name}",' - sources_line_to_add = f"{distribution_name} = {{ workspace = true }}" - workspace_line_to_add = f' "providers/{only_provider_path}",' - - for line in content: - if line.startswith("[dependency-groups]"): - in_dependency_groups = True - elif in_dependency_groups and ( - line.startswith("]") or (line.startswith(' "apache') and dependency_line_to_add < line) - ): - updated_content.append(dependency_line_to_add) - in_dependency_groups = False - if line.startswith("[tool.uv.sources]"): - in_tool_uv_sources = True - elif in_tool_uv_sources and ( - line.strip() == "" - or line.startswith("[") - or (line.startswith("apache") and sources_line_to_add < line) - ): - updated_content.append(sources_line_to_add) - in_tool_uv_sources = False - if line.startswith("[tool.uv.workspace]"): - in_tool_uv_workspace = True - elif in_tool_uv_workspace and ( - line.startswith("]") or (line.startswith(" ") and workspace_line_to_add < line) - ): - updated_content.append(workspace_line_to_add) - in_tool_uv_workspace = False - updated_content.append(line) - _do_stuff( - syntax="toml", - from_path=pyproject_toml_path, - from_content=content, - updated_content=updated_content, - ) - console.rule(style="bright_blue") - - -def 
fix_selective_checks_test(provider_id: str): - selective_checks_test_path = ( - ROOT_PROJECT_DIR_PATH / "dev" / "breeze" / "tests" / "test_selective_checks.py" - ) - provider_only_path = _get_provider_only_path(provider_id) - console.rule(f"Updating {selective_checks_test_path}", style="bright_blue") - _replace_string( - selective_checks_test_path, - f"providers/src/airflow/providers/{provider_only_path}", - f"providers/{provider_only_path}/src/airflow/providers/{provider_only_path}", - ) - console.rule(style="bright_blue") - - -def update_pre_commit_config(provider_id: str): - pre_commit_config_file = ROOT_PROJECT_DIR_PATH / ".pre-commit-config.yaml" - console.rule(f"Updating {pre_commit_config_file}", style="bright_blue") - _replace_string( - pre_commit_config_file, - f"providers/src/airflow/providers/{provider_id.replace('.', '/')}", - f"providers/{provider_id.replace('.', '/')}/src/airflow/providers/{provider_id.replace('.', '/')}", - ) - _replace_string( - pre_commit_config_file, - f"providers/tests/{provider_id.replace('.', '/')}", - f"providers/{provider_id.replace('.', '/')}/tests/provider_tests/{provider_id.replace('.', '/')}", - ) - console.rule(style="bright_blue") - - -def move_provider(provider_id: str): - fix_selective_checks_test(provider_id) - update_pre_commit_config(provider_id) - fix_boring_cyborg(provider_id) - update_airflow_pyproject_toml(provider_id) - add_docs_to_gitignore(provider_id) - remove_changelog(provider_id) - create_readme(provider_id) - move_docs(provider_id) - dependencies, devel_dependencies, optional_dependencies = move_provider_yaml(provider_id) - create_pyproject_toml(provider_id, dependencies, devel_dependencies, optional_dependencies) - move_sources(provider_id) - move_tests(provider_id) - has_system_test = move_system_tests(provider_id) - if has_system_test: - replace_system_test_example_includes(provider_id) - - -if __name__ == "__main__": - move_providers() diff --git a/docs/apache-airflow-providers/index.rst b/docs/apache-airflow-providers/index.rst index 4e37fd26afa9a..c4cf49d1bb696 100644 --- a/docs/apache-airflow-providers/index.rst +++ b/docs/apache-airflow-providers/index.rst @@ -185,7 +185,7 @@ provider packages are automatically documented in the release notes of every pro If you want to contribute to ``Apache Airflow``, you can see how to build and extend community managed providers in -``https://github.com/apache/airflow/blob/main/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst``. +``https://github.com/apache/airflow/blob/main/providers/MANAGING_PROVIDERS_LIFECYCLE.rst``. .. toctree:: :hidden: diff --git a/docs/conf.py b/docs/conf.py index d39efac79843a..cb3abd612acbc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -226,7 +226,11 @@ "sphinx_jinja", ] ) - exclude_patterns = ["operators/_partials"] + empty_subpackages = ["apache", "atlassian", "common", "cncf", "dbt", "microsoft"] + exclude_patterns = [ + "operators/_partials", + *[f"_api/tests/system/{subpackage}/index.rst" for subpackage in empty_subpackages], + ] else: exclude_patterns = [] @@ -778,23 +782,6 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") "*/tests/system/__init__.py", "*/tests/system/example_empty.py", "*/test_aws_auth_manager.py", - # These sub-folders aren't really providers, but we need __init__.py files else various tools (ruff, mypy) - # get confused by providers/tests/systems/cncf/kubernetes and think that folder is the top level - # kubernetes module! 
- # TODO (potiuk): remove these once we move all providers to the new structure - "*/providers/src/airflow/providers/__init__.py", - "*/providers/tests/__init__.py", - "*/providers/tests/cncf/__init__.py", - "*/providers/tests/common/__init__.py", - "*/providers/tests/apache/__init__.py", - "*/providers/tests/dbt/__init__.py", - "*/providers/tests/microsoft/__init__.py", - "*/providers/tests/system/__init__.py", - "*/providers/tests/system/apache/__init__.py", - "*/providers/tests/system/cncf/__init__.py", - "*/providers/tests/system/common/__init__.py", - "*/providers/tests/system/dbt/__init__.py", - "*/providers/tests/system/microsoft/__init__.py", ] ignore_re = re.compile(r"\[AutoAPI\] .* Ignoring \s (?P<path>/[\w/.]*)", re.VERBOSE) @@ -823,8 +810,6 @@ def filter_ignore(record: logging.LogRecord) -> bool: "*/airflow/__init__.py", "*/airflow/providers/__init__.py", "*/example_dags/*", - "*/airflow/providers/cncf/kubernetes/backcompat/*", - "*/providers/src/apache/airflow/providers/cncf/kubernetes/backcompat/*", "*/providers/__init__.py", ) ) diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py index 731e825ab387b..3ec517dc5f2d0 100644 --- a/docs/exts/docs_build/docs_builder.py +++ b/docs/exts/docs_build/docs_builder.py @@ -215,7 +215,7 @@ def cleanup_new_provider_dir(self): ) shutil.rmtree(self._src_dir, ignore_errors=True) console.print( - f"[bright_blue]{self.package_name:60}:[/] [magenta](NEW)[/] Coping docs " + f"[bright_blue]{self.package_name:60}:[/] [magenta](NEW)[/] Copying docs " f"from {self.provider_path}/docs to {self._src_dir}." ) shutil.copytree( diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py index ee5d9329d2a25..4d1c2c3095f3d 100644 --- a/docs/exts/exampleinclude.py +++ b/docs/exts/exampleinclude.py @@ -189,11 +189,7 @@ def create_node(env, relative_path, show_button): :return paragraph with the node """ - # Strip "providers" out of the example title that we include/link to. The full path needs to include - # it so we can pull in the code, but we don't want it to show up in the rendered docs - if relative_path.startswith("providers/src/"): - relative_path = relative_path.replace("providers/src/", "", 1) - elif relative_path.startswith("providers/"): + if relative_path.startswith("providers/"): relative_path = relative_path.replace("providers/", "", 1) if relative_path.endswith(".py"): diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 22f32d8405199..ff430dfd0a148 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -589,6 +589,7 @@ Env env envFrom EnvFromSource +envs EnvVar envvar eof diff --git a/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst b/providers/MANAGING_PROVIDERS_LIFECYCLE.rst similarity index 93% rename from providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst rename to providers/MANAGING_PROVIDERS_LIFECYCLE.rst index 9e7a601b1a56d..749f8e67caf9a 100644 --- a/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst +++ b/providers/MANAGING_PROVIDERS_LIFECYCLE.rst @@ -120,7 +120,7 @@ breeze and I'll run unit tests for my Hook. .. code-block:: bash - root@fafd8d630e46:/opt/airflow# python -m pytest providers/tests/<NEW_PROVIDER>/hook/test_*.py + root@fafd8d630e46:/opt/airflow# python -m pytest providers/<NEW_PROVIDER>/tests/<NEW_PROVIDER>/hook/test_*.py Adding chicken-egg providers ---------------------------- @@ -219,22 +219,19 @@ by ``breeze release-management`` command by release manager when providers are r ..
code-block:: bash ├── pyproject.toml - ├── providers/src/airflow/providers/ - │ └── <NEW_PROVIDER>/ - │ ├── provider.yaml - │ └── CHANGELOG.rst - │ - └── docs/ - ├── apache-airflow/ - │ └── extra-packages-ref.rst - ├── integration-logos/<NEW_PROVIDER>/ - │ └── <NEW_PROVIDER>.png - └── apache-airflow-providers-<NEW_PROVIDER>/ - ├── index.rst - ├── commits.rst - ├── connections.rst - └── operators/ - └── <NEW_PROVIDER>.rst + └── providers/<NEW_PROVIDER>/src/airflow/providers/ + ├── provider.yaml + ├── pyproject.toml + ├── CHANGELOG.rst + │ + └── docs/ + ├── integration-logos + │ └── <NEW_PROVIDER>.png + ├── index.rst + ├── commits.rst + ├── connections.rst + └── operators/ + └── <NEW_PROVIDER>.rst There is a chance that your provider's name is not a common English word. @@ -247,7 +244,7 @@ In the ``docs/apache-airflow-providers-<NEW_PROVIDER>/connections.rst``: - add information how to configure connection for your provider. -In the ``docs/apache-airflow-providers-<NEW_PROVIDER>/operators/<NEW_PROVIDER>.rst`` add information +In the provider's ``docs/operators/<NEW_PROVIDER>.rst`` add information how to use the Operator. It's important to add examples and additional information if your Operator has extra-parameters. @@ -267,13 +264,13 @@ Operator has extra-parameters. The NewProviderOperator requires a ``connection_id`` and this other awesome parameter. You can see an example below: - .. exampleinclude:: /../../providers/src/airflow/providers/<NEW_PROVIDER>/example_dags/example_<NEW_PROVIDER>.py + .. exampleinclude:: /../../providers/<NEW_PROVIDER>/example_dags/example_<NEW_PROVIDER>.py :language: python :start-after: [START howto_operator_<NEW_PROVIDER>] :end-before: [END howto_operator_<NEW_PROVIDER>] -Copy from another, similar provider the docs: ``docs/apache-airflow-providers-<NEW_PROVIDER>/*.rst``: +Copy from another, similar provider the docs: ``docs/*.rst``: At least those docs should be present @@ -287,7 +284,7 @@ At least those docs should be present Make sure to update/add all information that are specific for the new provider. -In the ``providers/src/airflow/providers/<NEW_PROVIDER>/provider.yaml`` add information of your provider: +In the ``providers/<NEW_PROVIDER>/src/airflow/providers/<NEW_PROVIDER>/provider.yaml`` add information of your provider: .. code-block:: yaml @@ -301,7 +298,7 @@ In the ``providers/<NEW_PROVIDER>/provider.yaml`` add info integrations: - integration-name: <NEW_PROVIDER> external-doc-url: https://www.example.io/ - logo: /integration-logos/<NEW_PROVIDER>/<NEW_PROVIDER>.png + logo: /docs/integration-logos/<NEW_PROVIDER>.png how-to-guide: - /docs/apache-airflow-providers-<NEW_PROVIDER>/operators/<NEW_PROVIDER>.rst tags: [service] @@ -352,23 +349,23 @@ Example failing collection after ``google`` provider has been suspended: .. code-block:: txt - _____ ERROR collecting providers/tests/apache/beam/operators/test_beam.py ______ - ImportError while importing test module '/opt/airflow/providers/tests/apache/beam/operators/test_beam.py'. + _____ ERROR collecting providers/apache/beam/tests/apache/beam/operators/test_beam.py ______ + ImportError while importing test module '/opt/airflow/providers/apache/beam/tests/apache/beam/operators/test_beam.py'. Hint: make sure your test modules/packages have valid Python names.
Traceback: /usr/local/lib/python3.8/importlib/__init__.py:127: in import_module return _bootstrap._gcd_import(name[level:], package, level) - providers/tests/apache/beam/operators/test_beam.py:25: in <module> + providers/apache/beam/tests/apache/beam/operators/test_beam.py:25: in <module> from airflow.providers.apache.beam.operators.beam import ( airflow/providers/apache/beam/operators/beam.py:35: in <module> from airflow.providers.google.cloud.hooks.dataflow import ( airflow/providers/google/cloud/hooks/dataflow.py:32: in <module> from google.cloud.dataflow_v1beta3 import GetJobRequest, Job, JobState, JobsV1Beta3AsyncClient, JobView E ModuleNotFoundError: No module named 'google.cloud.dataflow_v1beta3' - _ ERROR collecting providers/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py _ + _ ERROR collecting providers/microsoft/azure/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py _ -The fix is to add this line at the top of the ``providers/tests/apache/beam/operators/test_beam.py`` module: +The fix is to add this line at the top of the ``providers/apache/beam/tests/apache/beam/operators/test_beam.py`` module: .. code-block:: python diff --git a/providers/airbyte/tests/conftest.py b/providers/airbyte/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/airbyte/tests/conftest.py +++ b/providers/airbyte/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/__init__.py b/providers/airbyte/tests/system/__init__.py similarity index 100% rename from providers/__init__.py rename to providers/airbyte/tests/system/__init__.py diff --git a/providers/alibaba/docs/index.rst b/providers/alibaba/docs/index.rst index 09808f4022dd7..a8314750a354f 100644 --- a/providers/alibaba/docs/index.rst +++ b/providers/alibaba/docs/index.rst @@ -56,7 +56,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs <https://github.com/apache/airflow/tree/main/providers/tests/system/alibaba> + Example DAGs <https://github.com/apache/airflow/tree/main/providers/alibaba/tests/system/alibaba> PyPI Repository <https://pypi.org/project/apache-airflow-providers-alibaba/> Installing from sources <installing-providers-from-sources> diff --git a/providers/alibaba/tests/conftest.py b/providers/alibaba/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/alibaba/tests/conftest.py +++ b/providers/alibaba/tests/conftest.py @@ -16,17 +16,4 @@ # under the License.
diff --git a/providers/alibaba/docs/index.rst b/providers/alibaba/docs/index.rst
index 09808f4022dd7..a8314750a354f 100644
--- a/providers/alibaba/docs/index.rst
+++ b/providers/alibaba/docs/index.rst
@@ -56,7 +56,7 @@
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/providers/alibaba/tests/conftest.py b/providers/alibaba/tests/conftest.py
index 068fe6bbf5ae9..f56ccce0a3f69 100644
--- a/providers/alibaba/tests/conftest.py
+++ b/providers/alibaba/tests/conftest.py
@@ -16,17 +16,4 @@
 # under the License.
 from __future__ import annotations

-import pathlib
-
-import pytest
-
 pytest_plugins = "tests_common.pytest_plugin"
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_configure(config: pytest.Config) -> None:
-    deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")
-    dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else []
-    config.inicfg["airflow_deprecations_ignore"] = (
-        config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
-    )
diff --git a/providers/tests/__init__.py b/providers/alibaba/tests/system/__init__.py
similarity index 100%
rename from providers/tests/__init__.py
rename to providers/alibaba/tests/system/__init__.py
diff --git a/providers/amazon/docs/auth-manager/setup/amazon-verified-permissions.rst b/providers/amazon/docs/auth-manager/setup/amazon-verified-permissions.rst
index 85d68a1a9a138..453514d71f041 100644
--- a/providers/amazon/docs/auth-manager/setup/amazon-verified-permissions.rst
+++ b/providers/amazon/docs/auth-manager/setup/amazon-verified-permissions.rst
@@ -79,7 +79,7 @@ Update the policy store schema
   the policy store.
 * You have an existing policy store used for Airflow and you made some modifications to its schema you want to revert.
 * You have an existing policy store used for Airflow and you want to update its schema to the latest version.
-  This is only needed if your policy store schema and `the latest schema version `_
+  This is only needed if your policy store schema and `the latest schema version `_
   are different. If so, there should be a warning message when Airflow is starting.

 With CLI
@@ -100,7 +100,7 @@ Please follow the instructions below to update the Amazon Verified Permissions p
 2. Choose the policy store used by Airflow (by default its description is ``Airflow``).
 3. In the navigation pane on the left, choose **Schema**.
 4. Choose **Edit schema** and then choose **JSON mode**.
-5. Enter the content of `the latest schema version `_
+5. Enter the content of `the latest schema version `_
    in the **Contents** field.
 6. Choose **Save changes**.
diff --git a/providers/amazon/docs/executors/batch-executor.rst b/providers/amazon/docs/executors/batch-executor.rst
index aa62ea74bf10c..b198100473d75 100644
--- a/providers/amazon/docs/executors/batch-executor.rst
+++ b/providers/amazon/docs/executors/batch-executor.rst
@@ -19,7 +19,7 @@

 .. warning:: The Batch Executor is alpha/experimental at the moment and may be subject to change without warning.
 .. |executorName| replace:: Batch
-.. |dockerfileLink| replace:: `here `__
+.. |dockerfileLink| replace:: `here `__
 .. |configKwargs| replace:: SUBMIT_JOB_KWARGS

 ==================
diff --git a/providers/amazon/docs/executors/ecs-executor.rst b/providers/amazon/docs/executors/ecs-executor.rst
index e27c2480f7444..5bde473f34d36 100644
--- a/providers/amazon/docs/executors/ecs-executor.rst
+++ b/providers/amazon/docs/executors/ecs-executor.rst
@@ -17,7 +17,7 @@

 .. |executorName| replace:: ECS
-.. |dockerfileLink| replace:: `here `__
+.. |dockerfileLink| replace:: `here `__
 .. |configKwargs| replace:: SUBMIT_JOB_KWARGS

 ================
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
index 6561e8121c465..ee0b96b436161 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -420,7 +420,7 @@ def _check_avp_schema_version(self):
         if not self.avp_facade.is_policy_store_schema_up_to_date():
             self.log.warning(
                 "The Amazon Verified Permissions policy store schema is different from the latest version "
-                "(https://github.com/apache/airflow/blob/main/providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json). "
+                "(https://github.com/apache/airflow/blob/main/providers/amazon/aws/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json). "
                 "Please update it to its latest version. "
                 "See doc: https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/auth-manager/setup/amazon-verified-permissions.html#update-the-policy-store-schema."
             )
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/triggers/README.md b/providers/amazon/src/airflow/providers/amazon/aws/triggers/README.md
index 43f89b4b0ccc9..b7796dd603f83 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/triggers/README.md
+++ b/providers/amazon/src/airflow/providers/amazon/aws/triggers/README.md
@@ -126,7 +126,7 @@ Here, we are calling the `get_waiter` function defined in `base_aws.py` which ta
 }
 ```

-For more information about writing custom waiter, see the [README.md](https://github.com/apache/airflow/blob/main/providers/src/airflow/providers/amazon/aws/waiters/README.md) for custom waiters.
+For more information about writing a custom waiter, see the [README.md](https://github.com/apache/airflow/blob/main/providers/amazon/aws/src/airflow/providers/amazon/aws/waiters/README.md) for custom waiters.

 In some cases, a built-in or custom waiter may not be able to solve the problem. In such cases, the asynchronous method used to poll the boto3 API would need to be defined in the hook of the service being used. This method is essentially the same as the synchronous version of the method, except that it will use the aiobotocore client, and will be awaited. For the Redshift example, the async `describe_clusters` method would look as follows:
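The triggers README paragraph above introduces the async `describe_clusters` example, but the snippet itself sits outside this hunk's context. As a hedged illustration only, since the real method's name, signature, and hook plumbing are not shown in this diff, an aiobotocore-backed poll of that shape could look like:

```python
# Hedged sketch: the real method lives in the Redshift hook; everything below
# (function name, parameters, client construction) is an assumption.
from __future__ import annotations

from aiobotocore.session import get_session


async def describe_clusters_async(cluster_identifier: str, region_name: str | None = None) -> dict:
    """Async twin of the synchronous describe_clusters call, awaited by a trigger."""
    session = get_session()
    # Same boto3 API and parameters as the sync version; only the client is async.
    async with session.create_client("redshift", region_name=region_name) as client:
        return await client.describe_clusters(ClusterIdentifier=cluster_identifier)
```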
diff --git a/providers/amazon/tests/conftest.py b/providers/amazon/tests/conftest.py
index 068fe6bbf5ae9..f56ccce0a3f69 100644
--- a/providers/amazon/tests/conftest.py
+++ b/providers/amazon/tests/conftest.py
@@ -16,17 +16,4 @@
 # under the License.
 from __future__ import annotations

-import pathlib
-
-import pytest
-
 pytest_plugins = "tests_common.pytest_plugin"
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_configure(config: pytest.Config) -> None:
-    deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")
-    dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else []
-    config.inicfg["airflow_deprecations_ignore"] = (
-        config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
-    )
diff --git a/providers/tests/apache/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/router/__init__.py
similarity index 100%
rename from providers/tests/apache/__init__.py
rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/router/__init__.py
diff --git a/providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py b/providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py
index 1ee2cf0f3edfb..1b1f5a87be482 100644
--- a/providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py
+++ b/providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py
@@ -16,7 +16,7 @@
 # under the License.
 """
 This module contains the unit tests for the helper methods included in the Amazon System Tests found at
-providers/tests/system/amazon/aws/utils/__init__.py
+providers/amazon/tests/system/amazon/aws/utils/__init__.py
 """
 from __future__ import annotations
@@ -28,8 +28,8 @@
 import pytest
 from moto import mock_aws

-from providers.amazon.tests.system.amazon.aws import utils
-from providers.amazon.tests.system.amazon.aws.utils import (
+from system.amazon.aws import utils
+from system.amazon.aws.utils import (
     DEFAULT_ENV_ID_LEN,
     DEFAULT_ENV_ID_PREFIX,
     ENV_ID_ENVIRON_KEY,
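The test_helpers.py hunk above shows the import rewrite that the rest of this patch repeats across every Amazon system-test DAG: imports rooted at the umbrella `providers` package become imports rooted at the per-provider `tests` directory. A condensed sketch of the pattern, with names taken directly from the hunks in this patch (the builder calls are those visible in the example DAGs):

```python
# Before (umbrella distribution): helpers resolved through the top-level
# "providers" package, coupling every provider's tests to the monorepo layout.
# from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder

# After: each provider's "tests" directory is the root for its own test packages,
# so system-test helpers import straight from the "system" package.
from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

# Usage as it appears in the example DAGs: build a context task that fetches
# ENV_ID and any externally provided variables before the DAG's tasks run.
ROLE_ARN_KEY = "ROLE_ARN"
sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
```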
diff --git a/providers/tests/integration/redis/hooks/__init__.py b/providers/amazon/tests/system/__init__.py
similarity index 90%
rename from providers/tests/integration/redis/hooks/__init__.py
rename to providers/amazon/tests/system/__init__.py
index 13a83393a9124..e8fd22856438c 100644
--- a/providers/tests/integration/redis/hooks/__init__.py
+++ b/providers/amazon/tests/system/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
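The `pkgutil` line added above is what lets several provider distributions contribute modules under one package name: `extend_path` scans `sys.path` for other directories that also contain a `system` package (or `system.apache`, and so on) and splices them into the package's `__path__`. A small illustration of the mechanism, using hypothetical distribution names rather than the Airflow tree:

```python
# Illustration only; "dist_a" and "dist_b" are made-up names, not Airflow paths.
# Layout on sys.path:
#   dist_a/system/__init__.py   <- contains the extend_path line below
#   dist_a/system/alpha.py
#   dist_b/system/__init__.py   <- same extend_path line
#   dist_b/system/beta.py
#
# Contents of each system/__init__.py (this line only works inside a package
# __init__, where __path__ is defined):
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore

# With both dist_a and dist_b on sys.path, the two halves merge into one package:
#   import system.alpha   # resolved under dist_a
#   import system.beta    # resolved under dist_b
```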
diff --git a/providers/amazon/tests/system/amazon/README.md b/providers/amazon/tests/system/amazon/README.md
index 57ea665921f48..dff8fdd1ad24f 100644
--- a/providers/amazon/tests/system/amazon/README.md
+++ b/providers/amazon/tests/system/amazon/README.md
@@ -21,7 +21,7 @@

 ## Tests structure

-All AWS-related system tests are located inside `providers/tests/system/amazon/aws/`.
+All AWS-related system tests are located inside `providers/amazon/tests/system/amazon/aws/`.
 In this directory you will find test files in the form of Example DAGs, one DAG per file.
 Each test should be self-contained but in the case where additional resources are required,
 they can be found in the `resources` directory on the same level as tests or noted in the
@@ -30,12 +30,12 @@ test's docstring. Each test file should start with prefix `example_*`.
 Example directory structure:

 ```
-providers/tests/system/amazon/aws/tests
-  ├── example_athena.py
-  ├── example_batch.py
-  .
-  ├── example_step_functions.py
-  └── *
+providers/amazon/tests/system/amazon/aws/tests
+  ├── example_athena.py
+  ├── example_batch.py
+  .
+  ├── example_step_functions.py
+  └── *
 ```

 ## Initial configuration
diff --git a/providers/amazon/tests/system/amazon/aws/example_appflow.py b/providers/amazon/tests/system/amazon/aws/example_appflow.py
index 33ef42fbcf110..d22b633b70eab 100644
--- a/providers/amazon/tests/system/amazon/aws/example_appflow.py
+++ b/providers/amazon/tests/system/amazon/aws/example_appflow.py
@@ -18,8 +18,6 @@

 from datetime import datetime

-from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder
-
 from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.operators.appflow import (
@@ -30,6 +28,7 @@
     AppflowRunFullOperator,
 )
 from airflow.providers.standard.operators.bash import BashOperator
+from system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()
diff --git a/providers/amazon/tests/system/amazon/aws/example_appflow_run.py b/providers/amazon/tests/system/amazon/aws/example_appflow_run.py
index c372da03c35ae..52b048c41554a 100644
--- a/providers/amazon/tests/system/amazon/aws/example_appflow_run.py
+++ b/providers/amazon/tests/system/amazon/aws/example_appflow_run.py
@@ -20,7 +20,6 @@
 from datetime import datetime

 import boto3
-from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder

 from airflow.decorators import task
 from airflow.models.baseoperator import chain
@@ -34,6 +33,7 @@
     S3DeleteBucketOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
+from system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()
diff --git a/providers/amazon/tests/system/amazon/aws/example_athena.py b/providers/amazon/tests/system/amazon/aws/example_athena.py
index bd26cbb5a8f8f..b49df897c1bd2 100644
--- a/providers/amazon/tests/system/amazon/aws/example_athena.py
+++ b/providers/amazon/tests/system/amazon/aws/example_athena.py
@@ -19,7 +19,6 @@
 from datetime import datetime

 import boto3
-from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder

 from airflow.decorators import task
 from airflow.models.baseoperator import chain
@@ -33,6 +32,7 @@
 )
 from airflow.providers.amazon.aws.sensors.athena import AthenaSensor
 from airflow.utils.trigger_rule import TriggerRule
+from system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()
diff --git a/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py
index 6e5338298ccad..0374faf2441b4 100644
--- a/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py
@@ -18,13 +18,12 @@

 from datetime import datetime

-from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder
-
 from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import AzureBlobStorageToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
+from system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()
diff --git
a/providers/amazon/tests/system/amazon/aws/example_batch.py b/providers/amazon/tests/system/amazon/aws/example_batch.py index e512180221d16..df1021ac2989f 100644 --- a/providers/amazon/tests/system/amazon/aws/example_batch.py +++ b/providers/amazon/tests/system/amazon/aws/example_batch.py @@ -20,12 +20,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ( - ENV_ID_KEY, - SystemTestContextBuilder, - prune_logs, - split_string, -) from airflow.decorators import task from airflow.models.baseoperator import chain @@ -38,6 +32,12 @@ BatchSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ( + ENV_ID_KEY, + SystemTestContextBuilder, + prune_logs, + split_string, +) log = logging.getLogger(__name__) diff --git a/providers/amazon/tests/system/amazon/aws/example_bedrock.py b/providers/amazon/tests/system/amazon/aws/example_bedrock.py index 9a0898e41b442..98b957b46e71c 100644 --- a/providers/amazon/tests/system/amazon/aws/example_bedrock.py +++ b/providers/amazon/tests/system/amazon/aws/example_bedrock.py @@ -21,7 +21,6 @@ from os import environ import boto3 -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -44,6 +43,7 @@ from airflow.providers.standard.operators.empty import EmptyOperator from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder # Externally fetched variables: ROLE_ARN_KEY = "ROLE_ARN" diff --git a/providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py b/providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py index cec67ec58423f..dd876c5bd86ff 100644 --- a/providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +++ b/providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py @@ -32,7 +32,6 @@ OpenSearch, RequestsHttpConnection, ) -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow import DAG from airflow.decorators import task, task_group @@ -61,6 +60,7 @@ from airflow.providers.standard.operators.empty import EmptyOperator from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder ####################################################################### # NOTE: diff --git a/providers/amazon/tests/system/amazon/aws/example_cloudformation.py b/providers/amazon/tests/system/amazon/aws/example_cloudformation.py index 3066ca2e7e850..4897450837c11 100644 --- a/providers/amazon/tests/system/amazon/aws/example_cloudformation.py +++ b/providers/amazon/tests/system/amazon/aws/example_cloudformation.py @@ -19,8 +19,6 @@ import json from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.cloud_formation import ( @@ -32,6 +30,7 @@ CloudFormationDeleteStackSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_comprehend.py 
b/providers/amazon/tests/system/amazon/aws/example_comprehend.py index 75d21b4cd0981..df1a11ff84c02 100644 --- a/providers/amazon/tests/system/amazon/aws/example_comprehend.py +++ b/providers/amazon/tests/system/amazon/aws/example_comprehend.py @@ -19,8 +19,6 @@ import json from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow import DAG from airflow.decorators import task_group from airflow.models.baseoperator import chain @@ -34,6 +32,7 @@ ComprehendStartPiiEntitiesDetectionJobCompletedSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py b/providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py index 10dc8edadaca9..c31354c82f0c1 100644 --- a/providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py +++ b/providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow import DAG from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -37,6 +35,7 @@ ComprehendCreateDocumentClassifierCompletedSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" BUCKET_NAME_KEY = "BUCKET_NAME" diff --git a/providers/amazon/tests/system/amazon/aws/example_datasync.py b/providers/amazon/tests/system/amazon/aws/example_datasync.py index 60457b2c24cc9..7470ebf8c341d 100644 --- a/providers/amazon/tests/system/amazon/aws/example_datasync.py +++ b/providers/amazon/tests/system/amazon/aws/example_datasync.py @@ -19,7 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -27,6 +26,7 @@ from airflow.providers.amazon.aws.operators.datasync import DataSyncOperator from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_datasync" diff --git a/providers/amazon/tests/system/amazon/aws/example_dms.py b/providers/amazon/tests/system/amazon/aws/example_dms.py index 2f52584f21016..0265c88543496 100644 --- a/providers/amazon/tests/system/amazon/aws/example_dms.py +++ b/providers/amazon/tests/system/amazon/aws/example_dms.py @@ -27,8 +27,6 @@ from typing import cast import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.amazon.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id from sqlalchemy import Column, MetaData, String, Table, create_engine from airflow.decorators import task @@ -48,6 +46,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.sensors.dms import DmsTaskBaseSensor, DmsTaskCompletedSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, 
SystemTestContextBuilder +from system.amazon.aws.utils.ec2 import get_default_vpc_id DAG_ID = "example_dms" ROLE_ARN_KEY = "ROLE_ARN" diff --git a/providers/amazon/tests/system/amazon/aws/example_dms_serverless.py b/providers/amazon/tests/system/amazon/aws/example_dms_serverless.py index 644c73551d228..6144bbcb54dd6 100644 --- a/providers/amazon/tests/system/amazon/aws/example_dms_serverless.py +++ b/providers/amazon/tests/system/amazon/aws/example_dms_serverless.py @@ -26,7 +26,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from sqlalchemy import Column, MetaData, String, Table, create_engine from airflow.decorators import task @@ -44,6 +43,7 @@ ) from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder """ This example demonstrates how to use the DMS operators to create a serverless replication task to replicate data diff --git a/providers/amazon/tests/system/amazon/aws/example_dynamodb.py b/providers/amazon/tests/system/amazon/aws/example_dynamodb.py index f7c08f55ef913..af2573130a39e 100644 --- a/providers/amazon/tests/system/amazon/aws/example_dynamodb.py +++ b/providers/amazon/tests/system/amazon/aws/example_dynamodb.py @@ -19,13 +19,13 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder # TODO: FIXME The argument types here seems somewhat tricky to fix # mypy: disable-error-code="arg-type" diff --git a/providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py index 58439869031d5..3985e40dd00f6 100644 --- a/providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py @@ -21,7 +21,6 @@ import boto3 import tenacity -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from tenacity import before_log, before_sleep_log from airflow.decorators import task, task_group @@ -32,6 +31,7 @@ from airflow.providers.standard.operators.empty import EmptyOperator from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder log = logging.getLogger(__name__) diff --git a/providers/amazon/tests/system/amazon/aws/example_ec2.py b/providers/amazon/tests/system/amazon/aws/example_ec2.py index ca366f8bf6c83..a89a801fd6ebf 100644 --- a/providers/amazon/tests/system/amazon/aws/example_ec2.py +++ b/providers/amazon/tests/system/amazon/aws/example_ec2.py @@ -20,7 +20,6 @@ from operator import itemgetter import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -35,6 +34,7 @@ ) from airflow.providers.amazon.aws.sensors.ec2 import EC2InstanceStateSensor from airflow.utils.trigger_rule import TriggerRule +from 
system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_ec2" diff --git a/providers/amazon/tests/system/amazon/aws/example_ecs.py b/providers/amazon/tests/system/amazon/aws/example_ecs.py index e8fec4600cae5..e2f8563285e91 100644 --- a/providers/amazon/tests/system/amazon/aws/example_ecs.py +++ b/providers/amazon/tests/system/amazon/aws/example_ecs.py @@ -19,7 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -37,6 +36,7 @@ EcsTaskDefinitionStateSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_ecs" diff --git a/providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py b/providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py index 5204e96b1f1a0..162ecb29d3a16 100644 --- a/providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py +++ b/providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py @@ -19,7 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -28,6 +27,7 @@ from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator from airflow.providers.amazon.aws.sensors.ecs import EcsTaskStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_ecs_fargate" diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_templated.py b/providers/amazon/tests/system/amazon/aws/example_eks_templated.py index eab98d80f1966..8d0788caf25f7 100644 --- a/providers/amazon/tests/system/amazon/aws/example_eks_templated.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_templated.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.eks import ClusterStates, NodegroupStates @@ -31,6 +29,7 @@ EksPodOperator, ) from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py index 003cd34f7ae86..91b12d7de9a50 100644 --- a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py @@ -18,9 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.eks import ClusterStates, FargateProfileStates @@ -31,6 +28,8 @@ ) from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor from airflow.utils.trigger_rule 
import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from system.amazon.aws.utils.k8s import get_describe_pod_operator DAG_ID = "example_eks_with_fargate_in_one_step" diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py index 0887b3494c30d..6723ebd7914cb 100644 --- a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py @@ -18,9 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.eks import ClusterStates, FargateProfileStates @@ -33,6 +30,8 @@ ) from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from system.amazon.aws.utils.k8s import get_describe_pod_operator DAG_ID = "example_eks_with_fargate_profile" diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py index 3b11019c38221..b941335cf6f06 100644 --- a/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py @@ -19,8 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator from airflow.decorators import task from airflow.models.baseoperator import chain @@ -33,6 +31,8 @@ ) from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from system.amazon.aws.utils.k8s import get_describe_pod_operator DAG_ID = "example_eks_with_nodegroup_in_one_step" diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py index 1151b15991dc1..d52e64925e772 100644 --- a/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py @@ -19,8 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator from airflow.decorators import task from airflow.models.baseoperator import chain @@ -35,6 +33,8 @@ ) from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from system.amazon.aws.utils.k8s import get_describe_pod_operator DAG_ID = "example_eks_with_nodegroups" diff --git a/providers/amazon/tests/system/amazon/aws/example_emr.py 
b/providers/amazon/tests/system/amazon/aws/example_emr.py index 62dc8d2128d9b..7863010fac709 100644 --- a/providers/amazon/tests/system/amazon/aws/example_emr.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr.py @@ -23,7 +23,6 @@ from typing import Any import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -37,6 +36,7 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.sensors.emr import EmrJobFlowSensor, EmrStepSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_emr" CONFIG_NAME = "EMR Runtime Role Security Configuration" diff --git a/providers/amazon/tests/system/amazon/aws/example_emr_eks.py b/providers/amazon/tests/system/amazon/aws/example_emr_eks.py index 4254f8a0c6ba1..937097a416e5f 100644 --- a/providers/amazon/tests/system/amazon/aws/example_emr_eks.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr_eks.py @@ -21,7 +21,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -37,6 +36,7 @@ from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor from airflow.providers.amazon.aws.sensors.emr import EmrContainerSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_emr_eks" diff --git a/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py b/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py index f145a10debd79..2b376b3d2a57c 100644 --- a/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py @@ -20,8 +20,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.emr import ( @@ -29,6 +27,7 @@ EmrStopNotebookExecutionOperator, ) from airflow.providers.amazon.aws.sensors.emr import EmrNotebookExecutionSensor +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_emr_notebook" # Externally fetched variables: diff --git a/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py b/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py index f15889fedbb91..ce51ee3577ecf 100644 --- a/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py @@ -19,7 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -33,6 +32,7 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.sensors.emr import EmrServerlessApplicationSensor, EmrServerlessJobSensor from airflow.utils.trigger_rule import TriggerRule +from 
system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_emr_serverless" diff --git a/providers/amazon/tests/system/amazon/aws/example_eventbridge.py b/providers/amazon/tests/system/amazon/aws/example_eventbridge.py index cd6d7905b0d15..5dd717f3f9f90 100644 --- a/providers/amazon/tests/system/amazon/aws/example_eventbridge.py +++ b/providers/amazon/tests/system/amazon/aws/example_eventbridge.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.eventbridge import ( @@ -28,6 +26,7 @@ EventBridgePutEventsOperator, EventBridgePutRuleOperator, ) +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_eventbridge" ENTRIES = [ diff --git a/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py index 34373229f9ae6..60e5a75df1617 100644 --- a/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py @@ -18,13 +18,12 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py index 96b73671e044b..51db7a7e1d2a3 100644 --- a/providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -34,6 +32,7 @@ GCSDeleteBucketOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder # Externally fetched variables: GCP_PROJECT_ID = "GCP_PROJECT_ID" diff --git a/providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py b/providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py index 4da7f5d7be7cc..0d0dfe95c6a28 100644 --- a/providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py +++ b/providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py @@ -19,7 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -31,6 +30,7 @@ from airflow.providers.amazon.aws.sensors.glacier import GlacierJobOperationSensor from airflow.providers.amazon.aws.transfers.glacier_to_gcs import GlacierToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git 
a/providers/amazon/tests/system/amazon/aws/example_glue.py b/providers/amazon/tests/system/amazon/aws/example_glue.py index a7fa18eaa7942..c7681e3f38162 100644 --- a/providers/amazon/tests/system/amazon/aws/example_glue.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue.py @@ -20,7 +20,6 @@ from typing import TYPE_CHECKING import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -36,6 +35,7 @@ from airflow.providers.amazon.aws.sensors.glue_catalog_partition import GlueCatalogPartitionSensor from airflow.providers.amazon.aws.sensors.glue_crawler import GlueCrawlerSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs if TYPE_CHECKING: from botocore.client import BaseClient diff --git a/providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py index 941d7232ae165..6a8fd75440d6f 100644 --- a/providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow import DAG from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -36,6 +34,7 @@ ) from airflow.providers.amazon.aws.sensors.glue import GlueDataQualityRuleSetEvaluationRunSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py index 6757dd2e615f6..65e05e3eb3614 100644 --- a/providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow import DAG from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -39,6 +37,7 @@ GlueDataQualityRuleSetEvaluationRunSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/amazon/tests/system/amazon/aws/example_glue_databrew.py b/providers/amazon/tests/system/amazon/aws/example_glue_databrew.py index 2460d762e8984..1c214041164d0 100644 --- a/providers/amazon/tests/system/amazon/aws/example_glue_databrew.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue_databrew.py @@ -18,7 +18,6 @@ import boto3 import pendulum -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,6 +31,7 @@ S3DeleteBucketOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder 
DAG_ID = "example_glue_databrew" diff --git a/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py index 38db6f178b27a..8794645590b56 100644 --- a/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py @@ -24,13 +24,12 @@ import os from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py index bc4afe636b14f..b4f47efc47096 100644 --- a/providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py @@ -52,7 +52,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow import settings from airflow.decorators import task @@ -62,6 +61,7 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_google_api_youtube_to_s3" diff --git a/providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py b/providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py index fe0d5ecd27fc3..5b88597362268 100644 --- a/providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py +++ b/providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py @@ -24,8 +24,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.decorators import task from airflow.models import Connection from airflow.models.baseoperator import chain @@ -34,6 +32,7 @@ from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator from airflow.utils import db from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_hive_to_dynamodb" diff --git a/providers/amazon/tests/system/amazon/aws/example_http_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_http_to_s3.py index 51ac0ce2480b4..0ffb292d98ce3 100644 --- a/providers/amazon/tests/system/amazon/aws/example_http_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_http_to_s3.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow import settings from airflow.decorators import task from airflow.models.baseoperator import chain @@ -29,6 +27,7 @@ from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator from 
airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py index 94ecf88ef010b..7489a03304390 100644 --- a/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py @@ -23,13 +23,12 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_imap_attachment_to_s3" diff --git a/providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py b/providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py index f49862084ac1b..9bb52939b0945 100644 --- a/providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py +++ b/providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py @@ -22,7 +22,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow import DAG, settings from airflow.decorators import task, task_group @@ -44,6 +43,7 @@ ) from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/amazon/tests/system/amazon/aws/example_lambda.py b/providers/amazon/tests/system/amazon/aws/example_lambda.py index 9a1a4d23cfa55..12e5fe8650d32 100644 --- a/providers/amazon/tests/system/amazon/aws/example_lambda.py +++ b/providers/amazon/tests/system/amazon/aws/example_lambda.py @@ -22,7 +22,6 @@ from io import BytesIO import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -33,6 +32,7 @@ ) from airflow.providers.amazon.aws.sensors.lambda_function import LambdaFunctionStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs DAG_ID = "example_lambda" diff --git a/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py index 69498f0055354..d172c9235c3fb 100644 --- a/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py @@ -19,14 +19,13 @@ import os from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import 
S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py index ddfb20e2e84c4..242e2afcbc865 100644 --- a/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py @@ -16,14 +16,13 @@ # under the License. from __future__ import annotations -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator from airflow.utils.timezone import datetime from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_mongo_to_s3" diff --git a/providers/amazon/tests/system/amazon/aws/example_mwaa.py b/providers/amazon/tests/system/amazon/aws/example_mwaa.py index 5f178019f64e1..01fd1afcbb83c 100644 --- a/providers/amazon/tests/system/amazon/aws/example_mwaa.py +++ b/providers/amazon/tests/system/amazon/aws/example_mwaa.py @@ -18,11 +18,10 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.mwaa import MwaaTriggerDagRunOperator +from system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_mwaa" diff --git a/providers/amazon/tests/system/amazon/aws/example_neptune.py b/providers/amazon/tests/system/amazon/aws/example_neptune.py index 4807635c2b3de..cfc6e16d4da0e 100644 --- a/providers/amazon/tests/system/amazon/aws/example_neptune.py +++ b/providers/amazon/tests/system/amazon/aws/example_neptune.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -28,6 +26,7 @@ NeptuneStartDbClusterOperator, NeptuneStopDbClusterOperator, ) +from system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_neptune" diff --git a/providers/amazon/tests/system/amazon/aws/example_quicksight.py b/providers/amazon/tests/system/amazon/aws/example_quicksight.py index 8f5e2036d3a2c..9662f75537a3a 100644 --- a/providers/amazon/tests/system/amazon/aws/example_quicksight.py +++ b/providers/amazon/tests/system/amazon/aws/example_quicksight.py @@ -21,7 +21,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -34,6 +33,7 @@ ) from airflow.providers.amazon.aws.sensors.quicksight import QuickSightSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder """ Prerequisites: diff --git a/providers/amazon/tests/system/amazon/aws/example_rds_event.py 
b/providers/amazon/tests/system/amazon/aws/example_rds_event.py index 79bd27c680f43..bab9862a0f01a 100644 --- a/providers/amazon/tests/system/amazon/aws/example_rds_event.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_event.py @@ -20,7 +20,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,6 +31,7 @@ RdsDeleteEventSubscriptionOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_rds_event" diff --git a/providers/amazon/tests/system/amazon/aws/example_rds_export.py b/providers/amazon/tests/system/amazon/aws/example_rds_export.py index c30096944f3a0..4921eabd86bac 100644 --- a/providers/amazon/tests/system/amazon/aws/example_rds_export.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_export.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -35,6 +33,7 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor, RdsSnapshotExistenceSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_rds_export" diff --git a/providers/amazon/tests/system/amazon/aws/example_rds_instance.py b/providers/amazon/tests/system/amazon/aws/example_rds_instance.py index 32e5304142e26..2997fe4c83036 100644 --- a/providers/amazon/tests/system/amazon/aws/example_rds_instance.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_instance.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.rds import ( @@ -30,6 +28,7 @@ ) from airflow.providers.amazon.aws.sensors.rds import RdsDbSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py b/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py index d4cdc1ec9d887..367015368ef9f 100644 --- a/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.rds import ( @@ -31,6 +29,7 @@ ) from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_rds_snapshot" diff --git a/providers/amazon/tests/system/amazon/aws/example_redshift.py b/providers/amazon/tests/system/amazon/aws/example_redshift.py 
index 1b3d139c711b9..0d26b12ee3195 100644 --- a/providers/amazon/tests/system/amazon/aws/example_redshift.py +++ b/providers/amazon/tests/system/amazon/aws/example_redshift.py @@ -20,8 +20,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.redshift_cluster import ( @@ -35,6 +33,7 @@ from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_redshift" diff --git a/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py b/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py index b9f337282dab7..717f6b423e579 100644 --- a/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py +++ b/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.redshift_cluster import ( @@ -37,6 +35,7 @@ from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_redshift_to_s3" diff --git a/providers/amazon/tests/system/amazon/aws/example_s3.py b/providers/amazon/tests/system/amazon/aws/example_s3.py index a4202cdb7b1c5..57e4ee9fdf372 100644 --- a/providers/amazon/tests/system/amazon/aws/example_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import ( @@ -38,6 +36,7 @@ from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor, S3KeysUnchangedSensor from airflow.providers.standard.operators.python import BranchPythonOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_s3" diff --git a/providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py index b79829cb4714f..a1f7e1b371570 100644 --- a/providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py @@ -20,7 +20,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,6 +31,7 @@ ) from airflow.providers.amazon.aws.transfers.s3_to_dynamodb import S3ToDynamoDBOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder log = 
logging.getLogger(__name__) diff --git a/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py index a723a306644ed..ff5da5afcce28 100644 --- a/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py @@ -18,13 +18,12 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py index 74e04698e4e62..e33a6fee04f4a 100644 --- a/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py @@ -18,13 +18,12 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py index 8b2c988a3c0ec..1830244181d2a 100644 --- a/providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow import settings from airflow.decorators import task from airflow.models import Connection @@ -41,6 +39,7 @@ from airflow.providers.amazon.aws.transfers.s3_to_sql import S3ToSqlOperator from airflow.providers.common.sql.operators.sql import SQLTableCheckOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from tests_common.test_utils.watcher import watcher diff --git a/providers/amazon/tests/system/amazon/aws/example_sagemaker.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker.py index 214cdf5ea48cd..4c2afb50013f6 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sagemaker.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker.py @@ -24,7 +24,6 @@ from textwrap import dedent import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -54,6 +53,7 @@ ) from airflow.providers.standard.operators.python import get_current_context from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs logger = 
logging.getLogger(__name__) diff --git a/providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py index e0aafda6ceac3..cac57f02ac5a4 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py @@ -20,7 +20,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -39,6 +38,7 @@ ) from airflow.providers.amazon.aws.sensors.sagemaker import SageMakerEndpointSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs DAG_ID = "example_sagemaker_endpoint" @@ -58,7 +58,7 @@ } # For an example of how to obtain the following train and test data, please see -# https://github.com/apache/airflow/blob/main/providers/tests/system/amazon/aws/example_sagemaker.py +# https://github.com/apache/airflow/blob/main/providers/amazon/tests/system/amazon/aws/example_sagemaker.py TRAIN_DATA = "0,4.9,2.5,4.5,1.7\n1,7.0,3.2,4.7,1.4\n0,7.3,2.9,6.3,1.8\n2,5.1,3.5,1.4,0.2\n" SAMPLE_TEST_DATA = "6.4,3.2,4.5,1.5" diff --git a/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py index c370111034ef4..13f31b0656e6b 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.sagemaker import ( @@ -28,6 +26,7 @@ SageMakerStartNoteBookOperator, SageMakerStopNotebookOperator, ) +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sagemaker_notebook" diff --git a/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py index d66b62b4a80cc..562f4227edbdf 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py @@ -19,8 +19,6 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.example_sagemaker import delete_experiments -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -33,6 +31,8 @@ SageMakerPipelineSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.example_sagemaker import delete_experiments +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sagemaker_pipeline" diff --git a/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py index b53cf8d00a6d5..62481484fa6e8 100644 --- a/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py @@ -23,13 +23,12 @@ from datetime import datetime -from 
providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py index 551bbf8efcf5b..df0f951910cbf 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py @@ -18,13 +18,12 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_sns.py b/providers/amazon/tests/system/amazon/aws/example_sns.py index cf00f109b16da..f66e02ad0d7be 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sns.py +++ b/providers/amazon/tests/system/amazon/aws/example_sns.py @@ -19,13 +19,13 @@ from datetime import datetime import boto3 -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py index 2d830876ed666..bfce41147c681 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py @@ -20,8 +20,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow import settings from airflow.decorators import task from airflow.models import Connection @@ -37,6 +35,7 @@ from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sql_to_s3" diff --git a/providers/amazon/tests/system/amazon/aws/example_sqs.py b/providers/amazon/tests/system/amazon/aws/example_sqs.py index 3b49fcc0481fe..4dfe7f216ebd9 100644 --- a/providers/amazon/tests/system/amazon/aws/example_sqs.py +++ b/providers/amazon/tests/system/amazon/aws/example_sqs.py @@ -18,8 +18,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - 
from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -27,6 +25,7 @@ from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator from airflow.providers.amazon.aws.sensors.sqs import SqsSensor from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/amazon/tests/system/amazon/aws/example_step_functions.py b/providers/amazon/tests/system/amazon/aws/example_step_functions.py index 8274af020708a..0ad4d6fe21b61 100644 --- a/providers/amazon/tests/system/amazon/aws/example_step_functions.py +++ b/providers/amazon/tests/system/amazon/aws/example_step_functions.py @@ -19,8 +19,6 @@ import json from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -30,6 +28,7 @@ StepFunctionStartExecutionOperator, ) from airflow.providers.amazon.aws.sensors.step_function import StepFunctionExecutionSensor +from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_step_functions" diff --git a/providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py b/providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py index 00e33fb7dce33..163b4b52b4faf 100644 --- a/providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py +++ b/providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py @@ -23,9 +23,9 @@ import pytest from fastapi.testclient import TestClient from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser -from providers.amazon.tests.system.amazon.aws.utils import set_env_id from airflow.api_fastapi.app import create_app +from system.amazon.aws.utils import set_env_id from tests_common.test_utils.config import conf_vars diff --git a/providers/amazon/tests/system/amazon/aws/utils/__init__.py b/providers/amazon/tests/system/amazon/aws/utils/__init__.py index f385d6ea11ca5..37752b97cfa64 100644 --- a/providers/amazon/tests/system/amazon/aws/utils/__init__.py +++ b/providers/amazon/tests/system/amazon/aws/utils/__init__.py @@ -49,7 +49,7 @@ INVALID_ENV_ID_MSG: str = ( "To maximize compatibility, the SYSTEM_TESTS_ENV_ID must be an alphanumeric string " - "which starts with a letter. Please see `providers/tests/system/amazon/README.md`." + "which starts with a letter. Please see `providers/amazon/tests/system/amazon/README.md`." ) LOWERCASE_ENV_ID_MSG: str = ( "The provided Environment ID contains uppercase letters and " diff --git a/providers/tests/integration/redis/operators/__init__.py b/providers/apache/beam/docs/_api/tests/system/__init__.py similarity index 90% rename from providers/tests/integration/redis/operators/__init__.py rename to providers/apache/beam/docs/_api/tests/system/__init__.py index 13a83393a9124..e8fd22856438c 100644 --- a/providers/tests/integration/redis/operators/__init__.py +++ b/providers/apache/beam/docs/_api/tests/system/__init__.py @@ -14,3 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
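The pattern in all of the system-test hunks above is the same: each provider's `tests` directory is now the root for test collection, so `system` and `integration` resolve as top-level packages and helpers are imported relative to that root rather than to the repository root. A minimal sketch of the resulting import, using only names visible in these diffs (the layout comment is an assumption about how the tests root lands on `sys.path`):

    # Assumed: providers/amazon/tests is on sys.path when these tests run,
    # so `system` maps to providers/amazon/tests/system and the helpers live
    # in providers/amazon/tests/system/amazon/aws/utils/__init__.py.
    from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

    # Builds the setup task that fetches SYSTEM_TESTS_ENV_ID for the DAG.
    sys_test_context_task = SystemTestContextBuilder().build()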
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/email/__init__.py b/providers/apache/beam/docs/_api/tests/system/apache/__init__.py similarity index 90% rename from providers/tests/email/__init__.py rename to providers/apache/beam/docs/_api/tests/system/apache/__init__.py index 217e5db960782..e8fd22856438c 100644 --- a/providers/tests/email/__init__.py +++ b/providers/apache/beam/docs/_api/tests/system/apache/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,3 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/beam/src/airflow/providers/apache/beam/README.md b/providers/apache/beam/src/airflow/providers/apache/beam/README.md index 17564c5bef69d..60a7514fe0fa5 100644 --- a/providers/apache/beam/src/airflow/providers/apache/beam/README.md +++ b/providers/apache/beam/src/airflow/providers/apache/beam/README.md @@ -69,19 +69,19 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/ ### New operators -| New Airflow 2.0 operators: `airflow.providers.apache.beam` package | -|:---------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/main/providers/src/airflow/providers/apache/beam/operators/beam.py) | -| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/main/providers/src/airflow/providers/apache/beam/operators/beam.py) | +| New Airflow 2.0 operators: `airflow.providers.apache.beam` package | +|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/main/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py) | +| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/main/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py) | ## Hooks ### New hooks -| New Airflow 2.0 hooks: `airflow.providers.apache.beam` package | -|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/main/providers/src/airflow/providers/apache/beam/hooks/beam.py) | +| New Airflow 2.0 hooks: `airflow.providers.apache.beam` package | +|:-----------------------------------------------------------------------------------------------------------------------------------------| +| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/main/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py) | ## Releases diff --git a/providers/apache/beam/tests/conftest.py b/providers/apache/beam/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/beam/tests/conftest.py +++ b/providers/apache/beam/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/tests/email/operators/__init__.py b/providers/apache/beam/tests/system/__init__.py similarity index 90% rename from providers/tests/email/operators/__init__.py rename to providers/apache/beam/tests/system/__init__.py index 217e5db960782..e8fd22856438c 100644 --- a/providers/tests/email/operators/__init__.py +++ b/providers/apache/beam/tests/system/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,3 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/beam/tests/system/apache/__init__.py b/providers/apache/beam/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/beam/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
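The conftest trimming seen here repeats in every provider: the file keeps only the `pytest_plugins = "tests_common.pytest_plugin"` registration, while the `deprecations_ignore.yml` lookup moves into the shared plugin. A hypothetical sketch of the centralized hook, assuming it mirrors the per-provider snippet deleted above (the real code lives in `tests_common.pytest_plugin` and may differ in detail):

    # Hypothetical sketch: a single pytest_configure hook in
    # tests_common.pytest_plugin replaces the per-provider copies, resolving
    # each provider's deprecations_ignore.yml from the rootdir pytest was
    # invoked with, so the ignore lists stay per-provider.
    import pathlib

    import pytest


    @pytest.hookimpl(tryfirst=True)
    def pytest_configure(config: pytest.Config) -> None:
        deprecations_ignore = pathlib.Path(config.rootpath) / "deprecations_ignore.yml"
        dep_path = [deprecations_ignore] if deprecations_ignore.exists() else []
        config.inicfg["airflow_deprecations_ignore"] = (
            config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
        )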
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/beam/tests/system/apache/beam/example_beam.py b/providers/apache/beam/tests/system/apache/beam/example_beam.py index 77464f157e564..85e1b674bb08f 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_beam.py +++ b/providers/apache/beam/tests/system/apache/beam/example_beam.py @@ -21,17 +21,16 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator +from system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_DIRECT_RUNNER_BUCKET_NAME, GCS_JAR_DIRECT_RUNNER_OBJECT_NAME, START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator -from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator - with models.DAG( "example_beam_native_java_direct_runner", schedule=None, # Override to match your needs diff --git a/providers/apache/beam/tests/system/apache/beam/example_beam_java_flink.py b/providers/apache/beam/tests/system/apache/beam/example_beam_java_flink.py index f34b8bea2166f..6f38c8bd8b91a 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_beam_java_flink.py +++ b/providers/apache/beam/tests/system/apache/beam/example_beam_java_flink.py @@ -21,17 +21,16 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator +from system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_FLINK_RUNNER_BUCKET_NAME, GCS_JAR_FLINK_RUNNER_OBJECT_NAME, START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator -from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator - with models.DAG( "example_beam_native_java_flink_runner", schedule=None, # Override to match your needs diff --git a/providers/apache/beam/tests/system/apache/beam/example_beam_java_spark.py b/providers/apache/beam/tests/system/apache/beam/example_beam_java_spark.py index a55225670fbfe..35d93cb0fb50d 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_beam_java_spark.py +++ b/providers/apache/beam/tests/system/apache/beam/example_beam_java_spark.py @@ -21,17 +21,16 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator +from system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_SPARK_RUNNER_BUCKET_NAME, GCS_JAR_SPARK_RUNNER_OBJECT_NAME, START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator -from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator - with models.DAG( "example_beam_native_java_spark_runner", schedule=None, # Override to match your needs diff --git a/providers/apache/beam/tests/system/apache/beam/example_go.py 
b/providers/apache/beam/tests/system/apache/beam/example_go.py index 5e2b63ea705bc..f2ada154a1a5d 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_go.py +++ b/providers/apache/beam/tests/system/apache/beam/example_go.py @@ -21,7 +21,10 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_GO, @@ -31,10 +34,6 @@ START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator -from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration - with models.DAG( "example_beam_native_go", start_date=START_DATE, diff --git a/providers/apache/beam/tests/system/apache/beam/example_go_dataflow.py b/providers/apache/beam/tests/system/apache/beam/example_go_dataflow.py index 550ae1ea4031c..627c586922b63 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_go_dataflow.py +++ b/providers/apache/beam/tests/system/apache/beam/example_go_dataflow.py @@ -21,7 +21,12 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator +from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor +from system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_GO_DATAFLOW_ASYNC, @@ -31,12 +36,6 @@ START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator -from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus -from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor - with models.DAG( "example_beam_native_go_dataflow_async", default_args=DEFAULT_ARGS, diff --git a/providers/apache/beam/tests/system/apache/beam/example_java_dataflow.py b/providers/apache/beam/tests/system/apache/beam/example_java_dataflow.py index 66a845fc266cd..dd959ce8678ef 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_java_dataflow.py +++ b/providers/apache/beam/tests/system/apache/beam/example_java_dataflow.py @@ -21,7 +21,10 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator +from system.apache.beam.utils import ( GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME, GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME, GCS_OUTPUT, @@ -30,10 +33,6 @@ START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator -from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator - with models.DAG( "example_beam_native_java_dataflow_runner", schedule=None, # Override to match your needs diff --git 
a/providers/apache/beam/tests/system/apache/beam/example_python.py b/providers/apache/beam/tests/system/apache/beam/example_python.py index cac2333afc2fe..53d31276c1a89 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_python.py +++ b/providers/apache/beam/tests/system/apache/beam/example_python.py @@ -21,7 +21,10 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -31,10 +34,6 @@ START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator -from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration - with models.DAG( "example_beam_native_python", start_date=START_DATE, diff --git a/providers/apache/beam/tests/system/apache/beam/example_python_async.py b/providers/apache/beam/tests/system/apache/beam/example_python_async.py index 5ec1bed622ea4..9cdf133e6e708 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_python_async.py +++ b/providers/apache/beam/tests/system/apache/beam/example_python_async.py @@ -21,7 +21,10 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -31,10 +34,6 @@ START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator -from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration - with models.DAG( dag_id="dataflow_native_python_async", start_date=START_DATE, diff --git a/providers/apache/beam/tests/system/apache/beam/example_python_dataflow.py b/providers/apache/beam/tests/system/apache/beam/example_python_dataflow.py index eea6410839b11..828d7d45ea34e 100644 --- a/providers/apache/beam/tests/system/apache/beam/example_python_dataflow.py +++ b/providers/apache/beam/tests/system/apache/beam/example_python_dataflow.py @@ -21,7 +21,12 @@ from __future__ import annotations -from providers.apache.beam.tests.system.apache.beam.utils import ( +from airflow import models +from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator +from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor +from system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -31,12 +36,6 @@ START_DATE, ) -from airflow import models -from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator -from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus -from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor - with models.DAG( "example_beam_native_python_dataflow_async", default_args=DEFAULT_ARGS, diff --git 
a/providers/apache/cassandra/tests/conftest.py b/providers/apache/cassandra/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/cassandra/tests/conftest.py +++ b/providers/apache/cassandra/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/cassandra/tests/integration/__init__.py b/providers/apache/cassandra/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/cassandra/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/cassandra/tests/integration/apache/__init__.py b/providers/apache/cassandra/tests/integration/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/cassandra/tests/integration/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/atlassian/__init__.py b/providers/apache/cassandra/tests/integration/apache/cassandra/__init__.py similarity index 100% rename from providers/tests/atlassian/__init__.py rename to providers/apache/cassandra/tests/integration/apache/cassandra/__init__.py diff --git a/providers/tests/cncf/__init__.py b/providers/apache/cassandra/tests/integration/apache/cassandra/hooks/__init__.py similarity index 100% rename from providers/tests/cncf/__init__.py rename to providers/apache/cassandra/tests/integration/apache/cassandra/hooks/__init__.py diff --git a/providers/tests/integration/apache/cassandra/hooks/test_cassandra.py b/providers/apache/cassandra/tests/integration/apache/cassandra/hooks/test_cassandra.py similarity index 100% rename from providers/tests/integration/apache/cassandra/hooks/test_cassandra.py rename to providers/apache/cassandra/tests/integration/apache/cassandra/hooks/test_cassandra.py diff --git a/providers/apache/cassandra/tests/system/__init__.py b/providers/apache/cassandra/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/cassandra/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/cassandra/tests/system/apache/__init__.py b/providers/apache/cassandra/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/cassandra/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
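Every one of these near-empty `__init__.py` files gains the same final line, which turns `system`, `system.apache`, `integration`, and so on into pkgutil-style namespace packages: `extend_path` merges the matching directories of every importable provider tests tree, so `system.apache.<provider>` resolves no matter which provider contributed the parent package. An illustrative sketch (the two-provider scenario is assumed; the line itself is taken verbatim from the diffs):

    # Contents of system/apache/__init__.py in each provider's tests tree:
    __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore

    # With providers/apache/beam/tests and providers/apache/cassandra/tests
    # both on sys.path, the merged `system.apache` namespace exposes both
    # subtrees:
    #     from system.apache.beam import utils
    #     from system.apache.cassandra import ...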
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/drill/tests/conftest.py b/providers/apache/drill/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/drill/tests/conftest.py +++ b/providers/apache/drill/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/drill/tests/integration/__init__.py b/providers/apache/drill/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/drill/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/drill/tests/integration/apache/__init__.py b/providers/apache/drill/tests/integration/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/drill/tests/integration/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/common/__init__.py b/providers/apache/drill/tests/integration/apache/drill/__init__.py similarity index 100% rename from providers/tests/common/__init__.py rename to providers/apache/drill/tests/integration/apache/drill/__init__.py diff --git a/providers/tests/dbt/__init__.py b/providers/apache/drill/tests/integration/apache/drill/hooks/__init__.py similarity index 100% rename from providers/tests/dbt/__init__.py rename to providers/apache/drill/tests/integration/apache/drill/hooks/__init__.py diff --git a/providers/tests/integration/apache/drill/hooks/test_drill.py b/providers/apache/drill/tests/integration/apache/drill/hooks/test_drill.py similarity index 100% rename from providers/tests/integration/apache/drill/hooks/test_drill.py rename to providers/apache/drill/tests/integration/apache/drill/hooks/test_drill.py diff --git a/providers/tests/integration/__init__.py b/providers/apache/drill/tests/integration/apache/drill/operators/__init__.py similarity index 100% rename from providers/tests/integration/__init__.py rename to providers/apache/drill/tests/integration/apache/drill/operators/__init__.py diff --git a/providers/apache/drill/tests/system/__init__.py b/providers/apache/drill/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/drill/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/drill/tests/system/apache/__init__.py b/providers/apache/drill/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/drill/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/druid/tests/conftest.py b/providers/apache/druid/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/druid/tests/conftest.py +++ b/providers/apache/druid/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/druid/tests/system/__init__.py b/providers/apache/druid/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/druid/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/druid/tests/system/apache/__init__.py b/providers/apache/druid/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/druid/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/flink/docs/index.rst b/providers/apache/flink/docs/index.rst index 9299b9cc320ac..4fa0cae59f71f 100644 --- a/providers/apache/flink/docs/index.rst +++ b/providers/apache/flink/docs/index.rst @@ -47,7 +47,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs <https://github.com/apache/airflow/tree/main/providers/tests/system/apache/flink> + Example DAGs <https://github.com/apache/airflow/tree/main/providers/apache/flink/tests/system/apache/flink> PyPI Repository Installing from sources diff --git a/providers/apache/flink/tests/conftest.py b/providers/apache/flink/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/flink/tests/conftest.py +++ b/providers/apache/flink/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/hdfs/tests/conftest.py b/providers/apache/hdfs/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/hdfs/tests/conftest.py +++ b/providers/apache/hdfs/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/hive/tests/conftest.py b/providers/apache/hive/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/hive/tests/conftest.py +++ b/providers/apache/hive/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/hive/tests/integration/__init__.py b/providers/apache/hive/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/hive/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/hive/tests/integration/apache/__init__.py b/providers/apache/hive/tests/integration/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/hive/tests/integration/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/apache/__init__.py b/providers/apache/hive/tests/integration/apache/hive/__init__.py similarity index 100% rename from providers/tests/integration/apache/__init__.py rename to providers/apache/hive/tests/integration/apache/hive/__init__.py diff --git a/providers/tests/integration/apache/cassandra/__init__.py b/providers/apache/hive/tests/integration/apache/hive/transfers/__init__.py similarity index 100% rename from providers/tests/integration/apache/cassandra/__init__.py rename to providers/apache/hive/tests/integration/apache/hive/transfers/__init__.py diff --git a/providers/tests/integration/apache/hive/transfers/test_mssql_to_hive.py b/providers/apache/hive/tests/integration/apache/hive/transfers/test_mssql_to_hive.py similarity index 100% rename from providers/tests/integration/apache/hive/transfers/test_mssql_to_hive.py rename to providers/apache/hive/tests/integration/apache/hive/transfers/test_mssql_to_hive.py diff --git a/providers/apache/hive/tests/provider_tests/apache/hive/transfers/test_s3_to_hive.py b/providers/apache/hive/tests/provider_tests/apache/hive/transfers/test_s3_to_hive.py index eea3dd39a905a..8f25a10ca0bd9 100644 --- a/providers/apache/hive/tests/provider_tests/apache/hive/transfers/test_s3_to_hive.py +++ b/providers/apache/hive/tests/provider_tests/apache/hive/transfers/test_s3_to_hive.py @@ -220,8 +220,8 @@ def test_execute(self, mock_hiveclihook): # file parameter to HiveCliHook.load_file is compared # against expected file output - mock_hiveclihook().load_file.side_effect = lambda *args, **kwargs: self._load_file_side_effect( - args, op_fn,
ext + mock_hiveclihook().load_file.side_effect = ( + lambda *args, **kwargs: self._load_file_side_effect(args, op_fn, ext) ) # Execute S3ToHiveTransfer s32hive = S3ToHiveOperator(**self.kwargs) diff --git a/providers/apache/hive/tests/system/__init__.py b/providers/apache/hive/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/hive/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/hive/tests/system/apache/__init__.py b/providers/apache/hive/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/hive/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/iceberg/tests/conftest.py b/providers/apache/iceberg/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/iceberg/tests/conftest.py +++ b/providers/apache/iceberg/tests/conftest.py @@ -16,17 +16,4 @@ # under the License.
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/iceberg/tests/system/__init__.py b/providers/apache/iceberg/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/iceberg/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/iceberg/tests/system/apache/__init__.py b/providers/apache/iceberg/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/iceberg/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/impala/tests/conftest.py b/providers/apache/impala/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/impala/tests/conftest.py +++ b/providers/apache/impala/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/kafka/tests/conftest.py b/providers/apache/kafka/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/kafka/tests/conftest.py +++ b/providers/apache/kafka/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/kafka/tests/integration/__init__.py b/providers/apache/kafka/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/kafka/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/kafka/tests/integration/apache/__init__.py b/providers/apache/kafka/tests/integration/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/kafka/tests/integration/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/apache/cassandra/hooks/__init__.py b/providers/apache/kafka/tests/integration/apache/kafka/__init__.py similarity index 100% rename from providers/tests/integration/apache/cassandra/hooks/__init__.py rename to providers/apache/kafka/tests/integration/apache/kafka/__init__.py diff --git a/providers/tests/integration/apache/drill/__init__.py b/providers/apache/kafka/tests/integration/apache/kafka/hooks/__init__.py similarity index 100% rename from providers/tests/integration/apache/drill/__init__.py rename to providers/apache/kafka/tests/integration/apache/kafka/hooks/__init__.py diff --git a/providers/tests/integration/apache/kafka/hooks/test_admin_client.py b/providers/apache/kafka/tests/integration/apache/kafka/hooks/test_admin_client.py similarity index 100% rename from providers/tests/integration/apache/kafka/hooks/test_admin_client.py rename to providers/apache/kafka/tests/integration/apache/kafka/hooks/test_admin_client.py diff --git a/providers/tests/integration/apache/kafka/hooks/test_consumer.py b/providers/apache/kafka/tests/integration/apache/kafka/hooks/test_consumer.py similarity index 100% rename from providers/tests/integration/apache/kafka/hooks/test_consumer.py rename to providers/apache/kafka/tests/integration/apache/kafka/hooks/test_consumer.py diff --git a/providers/tests/integration/apache/kafka/hooks/test_producer.py b/providers/apache/kafka/tests/integration/apache/kafka/hooks/test_producer.py similarity index 100% rename from providers/tests/integration/apache/kafka/hooks/test_producer.py rename to providers/apache/kafka/tests/integration/apache/kafka/hooks/test_producer.py diff --git a/providers/tests/integration/apache/drill/hooks/__init__.py b/providers/apache/kafka/tests/integration/apache/kafka/operators/__init__.py similarity index 100% rename from providers/tests/integration/apache/drill/hooks/__init__.py rename to providers/apache/kafka/tests/integration/apache/kafka/operators/__init__.py diff --git a/providers/tests/integration/apache/kafka/operators/test_consume.py b/providers/apache/kafka/tests/integration/apache/kafka/operators/test_consume.py similarity index 97% rename from providers/tests/integration/apache/kafka/operators/test_consume.py rename to providers/apache/kafka/tests/integration/apache/kafka/operators/test_consume.py index c6757f884cbad..a069ae66e9fdd 100644 --- a/providers/tests/integration/apache/kafka/operators/test_consume.py +++ b/providers/apache/kafka/tests/integration/apache/kafka/operators/test_consume.py @@ -89,7 +89,7 @@ def test_consumer_operator_test_1(self): operator = ConsumeFromTopicOperator( kafka_config_id=TOPIC, topics=[TOPIC], - apply_function="providers.tests.integration.apache.kafka.operators.test_consume._basic_message_tester", + apply_function="integration.apache.kafka.operators.test_consume._basic_message_tester", apply_function_kwargs={"test": TOPIC}, task_id="test", poll_timeout=10, diff --git a/providers/tests/integration/apache/kafka/operators/test_produce.py b/providers/apache/kafka/tests/integration/apache/kafka/operators/test_produce.py similarity index 96% rename from providers/tests/integration/apache/kafka/operators/test_produce.py rename to providers/apache/kafka/tests/integration/apache/kafka/operators/test_produce.py index 3bb006ec74ac0..3662d5465a5e6 100644 --- 
a/providers/tests/integration/apache/kafka/operators/test_produce.py +++ b/providers/apache/kafka/tests/integration/apache/kafka/operators/test_produce.py @@ -65,7 +65,7 @@ def test_producer_operator_test_1(self): kafka_config_id="kafka_default", task_id="produce_to_topic", topic=TOPIC, - producer_function="providers.tests.integration.apache.kafka.operators.test_produce._producer_function", + producer_function="integration.apache.kafka.operators.test_produce._producer_function", ) t.execute(context={}) diff --git a/providers/tests/integration/apache/drill/operators/__init__.py b/providers/apache/kafka/tests/integration/apache/kafka/sensors/__init__.py similarity index 100% rename from providers/tests/integration/apache/drill/operators/__init__.py rename to providers/apache/kafka/tests/integration/apache/kafka/sensors/__init__.py diff --git a/providers/tests/integration/apache/hive/__init__.py b/providers/apache/kafka/tests/integration/apache/kafka/triggers/__init__.py similarity index 100% rename from providers/tests/integration/apache/hive/__init__.py rename to providers/apache/kafka/tests/integration/apache/kafka/triggers/__init__.py diff --git a/providers/tests/integration/apache/kafka/triggers/test_await_message.py b/providers/apache/kafka/tests/integration/apache/kafka/triggers/test_await_message.py similarity index 96% rename from providers/tests/integration/apache/kafka/triggers/test_await_message.py rename to providers/apache/kafka/tests/integration/apache/kafka/triggers/test_await_message.py index 2cac576372a33..ddf5dd8c3028e 100644 --- a/providers/tests/integration/apache/kafka/triggers/test_await_message.py +++ b/providers/apache/kafka/tests/integration/apache/kafka/triggers/test_await_message.py @@ -75,7 +75,7 @@ async def test_trigger_await_message_test_1(self): trigger = AwaitMessageTrigger( topics=[TOPIC], - apply_function="providers.tests.integration.apache.kafka.triggers.test_await_message._apply_function", + apply_function="integration.apache.kafka.triggers.test_await_message._apply_function", apply_function_args=None, apply_function_kwargs=None, kafka_config_id="trigger.await_message.test.integration.test_1", diff --git a/providers/apache/kafka/tests/system/__init__.py b/providers/apache/kafka/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/kafka/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
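The ``apply_function`` and ``producer_function`` strings in these hunks are dotted import paths that the Kafka operators and triggers resolve to callables at run time. The ``providers.tests....`` prefix can be dropped because each provider's ``tests`` directory is now the import root for its own tests, so ``integration.apache.kafka...`` resolves directly. Roughly, the resolution works like this sketch (a stand-in for the equivalent import helper Airflow uses, shown only for illustration):

    from importlib import import_module


    def resolve_callable(dotted_path: str):
        # Split "pkg.module.attr" into module path and attribute name,
        # import the module, and return the attribute.
        module_path, _, attr_name = dotted_path.rpartition(".")
        module = import_module(module_path)
        return getattr(module, attr_name)

For example, ``resolve_callable("integration.apache.kafka.operators.test_consume._basic_message_tester")`` imports the moved test module and returns the tester function.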
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/kafka/tests/system/apache/__init__.py b/providers/apache/kafka/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/kafka/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/kylin/tests/conftest.py b/providers/apache/kylin/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/kylin/tests/conftest.py +++ b/providers/apache/kylin/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/kylin/tests/system/__init__.py b/providers/apache/kylin/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/kylin/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/kylin/tests/system/apache/__init__.py b/providers/apache/kylin/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/kylin/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/livy/tests/conftest.py b/providers/apache/livy/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/livy/tests/conftest.py +++ b/providers/apache/livy/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/livy/tests/system/__init__.py b/providers/apache/livy/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/livy/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/livy/tests/system/apache/__init__.py b/providers/apache/livy/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/livy/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/pig/tests/conftest.py b/providers/apache/pig/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/pig/tests/conftest.py +++ b/providers/apache/pig/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/pig/tests/system/__init__.py b/providers/apache/pig/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pig/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/pig/tests/system/apache/__init__.py b/providers/apache/pig/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pig/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/pinot/tests/conftest.py b/providers/apache/pinot/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/pinot/tests/conftest.py +++ b/providers/apache/pinot/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/pinot/tests/integration/__init__.py b/providers/apache/pinot/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pinot/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/pinot/tests/integration/apache/__init__.py b/providers/apache/pinot/tests/integration/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pinot/tests/integration/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
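As in the Hive, Iceberg, Kafka, Kylin, Livy, and Pig hunks above, each of these otherwise-empty ``__init__.py`` files ends with the same ``__path__``-extension one-liner (it opens the next hunk). That line is what lets several independent provider distributions contribute to one logical ``system`` or ``integration`` package tree. A small sketch of the effect, assuming a hypothetical pair of providers that both ship ``tests/system/apache/``:

    import sys

    # Hypothetical: put two providers' tests directories on the path. Each
    # contains system/__init__.py and system/apache/__init__.py with the
    # pkgutil.extend_path line from the hunks in this patch.
    sys.path[:0] = [
        "providers/apache/hive/tests",
        "providers/apache/kafka/tests",
    ]

    import system.apache

    # extend_path merged the "system/apache" directories of *both* roots, so
    # submodules from either provider are importable side by side.
    print(system.apache.__path__)

Without ``extend_path``, whichever ``system`` package is found first on ``sys.path`` would shadow the other provider's tree.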
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/pinot/tests/integration/apache/pinot/__init__.py b/providers/apache/pinot/tests/integration/apache/pinot/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pinot/tests/integration/apache/pinot/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/apache/hive/transfers/__init__.py b/providers/apache/pinot/tests/integration/apache/pinot/hooks/__init__.py similarity index 100% rename from providers/tests/integration/apache/hive/transfers/__init__.py rename to providers/apache/pinot/tests/integration/apache/pinot/hooks/__init__.py diff --git a/providers/tests/integration/apache/pinot/hooks/test_pinot.py b/providers/apache/pinot/tests/integration/apache/pinot/hooks/test_pinot.py similarity index 100% rename from providers/tests/integration/apache/pinot/hooks/test_pinot.py rename to providers/apache/pinot/tests/integration/apache/pinot/hooks/test_pinot.py diff --git a/providers/apache/pinot/tests/system/__init__.py b/providers/apache/pinot/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pinot/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/pinot/tests/system/apache/__init__.py b/providers/apache/pinot/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/pinot/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/spark/tests/conftest.py b/providers/apache/spark/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apache/spark/tests/conftest.py +++ b/providers/apache/spark/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/apache/spark/tests/system/__init__.py b/providers/apache/spark/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/spark/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apache/spark/tests/system/apache/__init__.py b/providers/apache/spark/tests/system/apache/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/apache/spark/tests/system/apache/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/apprise/tests/conftest.py b/providers/apprise/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/apprise/tests/conftest.py +++ b/providers/apprise/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/arangodb/tests/conftest.py b/providers/arangodb/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/arangodb/tests/conftest.py +++ b/providers/arangodb/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/asana/src/airflow/providers/asana/hooks/asana.py b/providers/asana/src/airflow/providers/asana/hooks/asana.py index 83c5094fb0c49..8416d107fde3f 100644 --- a/providers/asana/src/airflow/providers/asana/hooks/asana.py +++ b/providers/asana/src/airflow/providers/asana/hooks/asana.py @@ -316,7 +316,6 @@ def update_project(self, project_id: str, params: dict) -> dict: for a list of possible parameters :return: A dict containing the updated project's attributes """ - body = {"data": params} projects_api_instance = ProjectsApi(self.client) try: diff --git a/providers/asana/tests/conftest.py b/providers/asana/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/asana/tests/conftest.py +++ b/providers/asana/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/asana/tests/system/__init__.py b/providers/asana/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/asana/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/atlassian/jira/tests/conftest.py b/providers/atlassian/jira/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/atlassian/jira/tests/conftest.py +++ b/providers/atlassian/jira/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/celery/tests/conftest.py b/providers/celery/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/celery/tests/conftest.py +++ b/providers/celery/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/cloudant/tests/conftest.py b/providers/cloudant/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/cloudant/tests/conftest.py +++ b/providers/cloudant/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/cncf/kubernetes/docs/index.rst b/providers/cncf/kubernetes/docs/index.rst index b8bb5dbab1d67..628941a7c264b 100644 --- a/providers/cncf/kubernetes/docs/index.rst +++ b/providers/cncf/kubernetes/docs/index.rst @@ -66,7 +66,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py index 2d6f94d6ea018..c09291a70d505 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py @@ -73,7 +73,7 @@ def convert_env_vars(env_vars: list[k8s.V1EnvVar] | dict[str, str]) -> list[k8s. """ Coerce env var collection for kubernetes. - If the collection is a str-str dict, convert it into a list of ``V1EnvVar``s. + If the collection is a str-str dict, convert it into a list of ``V1EnvVar`` variables. """ if isinstance(env_vars, dict): return [k8s.V1EnvVar(name=k, value=v) for k, v in env_vars.items()] diff --git a/providers/cncf/kubernetes/tests/conftest.py b/providers/cncf/kubernetes/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/cncf/kubernetes/tests/conftest.py +++ b/providers/cncf/kubernetes/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/cncf/kubernetes/tests/system/__init__.py b/providers/cncf/kubernetes/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/cncf/kubernetes/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/cncf/kubernetes/tests/system/cncf/__init__.py b/providers/cncf/kubernetes/tests/system/cncf/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/cncf/kubernetes/tests/system/cncf/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes.py b/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes.py index 08a85a4a71da6..dc74895a3e0fd 100644 --- a/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes.py +++ b/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes.py @@ -24,11 +24,12 @@ import os from datetime import datetime +from kubernetes.client import models as k8s + from airflow import DAG from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator from airflow.providers.cncf.kubernetes.secret import Secret from airflow.providers.standard.operators.bash import BashOperator -from kubernetes.client import models as k8s # [START howto_operator_k8s_cluster_resources] secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn") diff --git a/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_async.py b/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_async.py index 2b8b7387e09f5..7eb08442be3ba 100644 --- a/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_async.py +++ b/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_async.py @@ -24,11 +24,12 @@ import os from datetime import datetime +from kubernetes.client import models as k8s + from airflow import DAG from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator from airflow.providers.cncf.kubernetes.secret import Secret from airflow.providers.standard.operators.bash import BashOperator -from kubernetes.client import models as k8s # [START howto_operator_k8s_cluster_resources] secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn") diff --git 
a/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_kueue.py b/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_kueue.py index 063be244b7aa7..9aac715fdbed9 100644 --- a/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_kueue.py +++ b/providers/cncf/kubernetes/tests/system/cncf/kubernetes/example_kubernetes_kueue.py @@ -24,13 +24,14 @@ import os from datetime import datetime +from kubernetes.client import models as k8s + from airflow import DAG from airflow.providers.cncf.kubernetes.operators.kueue import ( KubernetesInstallKueueOperator, KubernetesStartKueueJobOperator, ) from airflow.providers.cncf.kubernetes.operators.resource import KubernetesCreateResourceOperator -from kubernetes.client import models as k8s ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "example_kubernetes_kueue_operators" diff --git a/providers/cohere/tests/conftest.py b/providers/cohere/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/cohere/tests/conftest.py +++ b/providers/cohere/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/cohere/tests/system/__init__.py b/providers/cohere/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/cohere/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/common/compat/tests/conftest.py b/providers/common/compat/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/common/compat/tests/conftest.py +++ b/providers/common/compat/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
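For the ``convert_env_vars`` hunk above: the reworded docstring describes the coercion that the visible dict branch performs. A short usage example covering that branch (a sketch; only the dict case is shown in the hunk):

    from kubernetes.client import models as k8s

    from airflow.providers.cncf.kubernetes.backcompat.backwards_compat_converters import (
        convert_env_vars,
    )

    # A str-str dict is coerced into a list of V1EnvVar objects.
    assert convert_env_vars({"FOO": "1"}) == [k8s.V1EnvVar(name="FOO", value="1")]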
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/common/io/tests/conftest.py b/providers/common/io/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/common/io/tests/conftest.py +++ b/providers/common/io/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/common/io/tests/system/__init__.py b/providers/common/io/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/common/io/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/common/io/tests/system/common/__init__.py b/providers/common/io/tests/system/common/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/common/io/tests/system/common/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/common/sql/tests/conftest.py b/providers/common/sql/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/common/sql/tests/conftest.py +++ b/providers/common/sql/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/common/sql/tests/system/__init__.py b/providers/common/sql/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/common/sql/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/common/sql/tests/system/common/__init__.py b/providers/common/sql/tests/system/common/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/common/sql/tests/system/common/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/databricks/docs/index.rst b/providers/databricks/docs/index.rst index f1b88e1bfea45..c1d59a7c2a2ff 100644 --- a/providers/databricks/docs/index.rst +++ b/providers/databricks/docs/index.rst @@ -57,7 +57,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/providers/databricks/tests/conftest.py b/providers/databricks/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/databricks/tests/conftest.py +++ b/providers/databricks/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/databricks/tests/system/__init__.py b/providers/databricks/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/databricks/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/datadog/tests/conftest.py b/providers/datadog/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/datadog/tests/conftest.py +++ b/providers/datadog/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/dbt/cloud/tests/conftest.py b/providers/dbt/cloud/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/dbt/cloud/tests/conftest.py +++ b/providers/dbt/cloud/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/dbt/cloud/tests/system/__init__.py b/providers/dbt/cloud/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/dbt/cloud/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/dbt/cloud/tests/system/dbt/__init__.py b/providers/dbt/cloud/tests/system/dbt/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/dbt/cloud/tests/system/dbt/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/dingding/tests/conftest.py b/providers/dingding/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/dingding/tests/conftest.py +++ b/providers/dingding/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/dingding/tests/system/__init__.py b/providers/dingding/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/dingding/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/discord/tests/conftest.py b/providers/discord/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/discord/tests/conftest.py +++ b/providers/discord/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/docker/tests/conftest.py b/providers/docker/tests/conftest.py index c907bb89e4a12..89feb74cf6913 100644 --- a/providers/docker/tests/conftest.py +++ b/providers/docker/tests/conftest.py @@ -16,7 +16,6 @@ # under the License. 
from __future__ import annotations -import pathlib from unittest import mock import pytest @@ -24,15 +23,6 @@ pytest_plugins = "tests_common.pytest_plugin" -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) - - @pytest.fixture def hook_conn(request): """ diff --git a/providers/docker/tests/system/__init__.py b/providers/docker/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/docker/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/docker/tests/system/docker/example_docker_copy_data.py b/providers/docker/tests/system/docker/example_docker_copy_data.py index 32cba19ef0e6d..7da16636ca368 100644 --- a/providers/docker/tests/system/docker/example_docker_copy_data.py +++ b/providers/docker/tests/system/docker/example_docker_copy_data.py @@ -29,11 +29,12 @@ import os from datetime import datetime +from docker.types import Mount + from airflow import models from airflow.providers.docker.operators.docker import DockerOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.standard.operators.python import ShortCircuitOperator -from docker.types import Mount ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") DAG_ID = "docker_sample_copy_data" diff --git a/providers/edge/docs/install_on_windows.rst b/providers/edge/docs/install_on_windows.rst index 3e96e6eff9e6a..8e424a7bbf839 100644 --- a/providers/edge/docs/install_on_windows.rst +++ b/providers/edge/docs/install_on_windows.rst @@ -55,7 +55,7 @@ To setup a instance of Edge Worker on Windows, you need to follow the steps belo 7. Create a new folder ``dags`` in ``C:\\Airflow`` and copy the relevant DAG files in it. (At least the DAG files which should be executed on the edge alongside the dependencies. For testing purposes the DAGs from the ``apache-airflow`` repository can be used located in - .) + .) 8. 
Collect needed parameters from your running Airflow backend, at least the following: - ``edge`` / ``api_url``: The HTTP(s) endpoint where the Edge Worker connects to diff --git a/providers/edge/src/airflow/providers/edge/models/edge_worker.py b/providers/edge/src/airflow/providers/edge/models/edge_worker.py index 3765ac066cfae..982eb07db4a1b 100644 --- a/providers/edge/src/airflow/providers/edge/models/edge_worker.py +++ b/providers/edge/src/airflow/providers/edge/models/edge_worker.py @@ -134,7 +134,7 @@ def remove_queues(self, remove_queues: list[str]) -> None: self.queues = queues def update_state(self, state: str) -> None: - """Updates state field.""" + """Update state field.""" self.state = state @@ -190,7 +190,7 @@ def reset_metrics(worker_name: str) -> None: def request_maintenance( worker_name: str, maintenance_comment: str | None, session: Session = NEW_SESSION ) -> None: - """Writes maintenance request to the db""" + """Write maintenance request to the db.""" query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name) worker: EdgeWorkerModel = session.scalar(query) worker.state = EdgeWorkerState.MAINTENANCE_REQUEST @@ -199,7 +199,7 @@ def request_maintenance( @provide_session def exit_maintenance(worker_name: str, session: Session = NEW_SESSION) -> None: - """Writes maintenance exit to the db""" + """Write maintenance exit to the db.""" query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name) worker: EdgeWorkerModel = session.scalar(query) worker.state = EdgeWorkerState.MAINTENANCE_EXIT @@ -208,7 +208,7 @@ def exit_maintenance(worker_name: str, session: Session = NEW_SESSION) -> None: @provide_session def remove_worker(worker_name: str, session: Session = NEW_SESSION) -> None: - """Remove a worker that is offline or just gone from DB""" + """Remove a worker that is offline or just gone from DB.""" session.execute(delete(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)) @@ -216,7 +216,7 @@ def remove_worker(worker_name: str, session: Session = NEW_SESSION) -> None: def change_maintenance_comment( worker_name: str, maintenance_comment: str | None, session: Session = NEW_SESSION ) -> None: - """Writes maintenance comment in the db.""" + """Write maintenance comment in the db.""" query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name) worker: EdgeWorkerModel = session.scalar(query) worker.maintenance_comment = maintenance_comment diff --git a/providers/edge/tests/conftest.py b/providers/edge/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/edge/tests/conftest.py +++ b/providers/edge/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/elasticsearch/src/airflow/providers/elasticsearch/hooks/elasticsearch.py b/providers/elasticsearch/src/airflow/providers/elasticsearch/hooks/elasticsearch.py index 582e4abdb9e12..995d87415bb37 100644 --- a/providers/elasticsearch/src/airflow/providers/elasticsearch/hooks/elasticsearch.py +++ b/providers/elasticsearch/src/airflow/providers/elasticsearch/hooks/elasticsearch.py @@ -44,7 +44,7 @@ def connect( class ElasticsearchSQLCursor: - """A PEP 249-like Cursor class for Elasticsearch SQL API""" + """A PEP 249-like Cursor class for Elasticsearch SQL API.""" def __init__(self, es: Elasticsearch, **kwargs): self.es = es diff --git a/providers/elasticsearch/tests/conftest.py b/providers/elasticsearch/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/elasticsearch/tests/conftest.py +++ b/providers/elasticsearch/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/elasticsearch/tests/system/__init__.py b/providers/elasticsearch/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/elasticsearch/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/exasol/tests/conftest.py b/providers/exasol/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/exasol/tests/conftest.py +++ b/providers/exasol/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
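Every provider `conftest.py` touched above shrinks to the single `pytest_plugins = "tests_common.pytest_plugin"` assignment, with the per-provider `pytest_configure` hook deleted. A centralized equivalent in the shared plugin could look roughly like the sketch below; the glob-based discovery is an assumption made for illustration, and only the `airflow_deprecations_ignore` bookkeeping is taken from the removed hooks:

```python
from __future__ import annotations

import pytest


@pytest.hookimpl(tryfirst=True)
def pytest_configure(config: pytest.Config) -> None:
    # Hypothetical consolidated hook: gather every provider's
    # deprecations_ignore.yml once, instead of hard-coding one
    # path per provider conftest.py as before.
    ignore_files = sorted(
        str(path) for path in config.rootpath.glob("providers/**/tests/deprecations_ignore.yml")
    )
    config.inicfg["airflow_deprecations_ignore"] = (
        config.inicfg.get("airflow_deprecations_ignore", []) + ignore_files  # type: ignore[assignment,operator]
    )
```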
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/fab/tests/conftest.py b/providers/fab/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/fab/tests/conftest.py +++ b/providers/fab/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/fab/tests/provider_tests/fab/auth_manager/api_endpoints/api_connexion_utils.py b/providers/fab/tests/provider_tests/fab/auth_manager/api_endpoints/api_connexion_utils.py index b208b845096b9..7a891d446b0ea 100644 --- a/providers/fab/tests/provider_tests/fab/auth_manager/api_endpoints/api_connexion_utils.py +++ b/providers/fab/tests/provider_tests/fab/auth_manager/api_endpoints/api_connexion_utils.py @@ -56,7 +56,6 @@ def create_user(app, username, role_name=None, email=None, permissions=None): # Removes user and role so each test has isolated test data. 
delete_user(app, username) - role = None if role_name: delete_role(app, role_name) role = create_role(app, role_name, permissions) diff --git a/providers/fab/tests/provider_tests/fab/auth_manager/models/test_db.py b/providers/fab/tests/provider_tests/fab/auth_manager/models/test_db.py index 91e1449571336..c9ef74d3d6259 100644 --- a/providers/fab/tests/provider_tests/fab/auth_manager/models/test_db.py +++ b/providers/fab/tests/provider_tests/fab/auth_manager/models/test_db.py @@ -19,12 +19,12 @@ import re from unittest import mock -import providers.fab.src.airflow.providers.fab as provider_fab import pytest from alembic.autogenerate import compare_metadata from alembic.migration import MigrationContext from sqlalchemy import MetaData +import providers.fab.src.airflow.providers.fab as provider_fab from airflow.settings import engine from airflow.utils.db import ( compare_server_default, diff --git a/providers/fab/tests/provider_tests/fab/www/views/conftest.py b/providers/fab/tests/provider_tests/fab/www/views/conftest.py index 8b770afe96509..437b4f72bd80f 100644 --- a/providers/fab/tests/provider_tests/fab/www/views/conftest.py +++ b/providers/fab/tests/provider_tests/fab/www/views/conftest.py @@ -21,11 +21,11 @@ import jinja2 import pytest -from providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import delete_user from airflow import settings from airflow.models import DagBag from airflow.www.app import create_app +from provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import delete_user from tests_common.test_utils.config import conf_vars from tests_common.test_utils.db import parse_and_sync_to_db diff --git a/providers/fab/tests/provider_tests/fab/www/views/test_views_acl.py b/providers/fab/tests/provider_tests/fab/www/views/test_views_acl.py index 3ffc5918baa7d..ef7ae0a2e258d 100644 --- a/providers/fab/tests/provider_tests/fab/www/views/test_views_acl.py +++ b/providers/fab/tests/provider_tests/fab/www/views/test_views_acl.py @@ -23,18 +23,27 @@ import pytest -from airflow.models import DagModel +from airflow import DAG, settings +from airflow.models import DagBag, DagModel, DagRun, TaskInstance, Variable +from airflow.models.errors import ParseImportError from airflow.security import permissions from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.types import DagRunTriggeredByType, DagRunType -from airflow.www.views import FILTER_STATUS_COOKIE -from provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user_scope +from airflow.www.views import FILTER_STATUS_COOKIE, DagRunModelView +from provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( + create_test_client, + create_user, + create_user_scope, + delete_roles, + delete_user, +) from tests_common.test_utils.db import clear_db_runs from tests_common.test_utils.permissions import _resource_name from tests_common.test_utils.www import ( + capture_templates, # noqa: F401 check_content_in_response, check_content_not_in_response, client_with_login, @@ -85,6 +94,25 @@ } +def _get_appbuilder_pk_string(model_view_cls, instance) -> str: + """Utility to get Flask-Appbuilder's string format "pk" for an object. + + Used to generate requests to FAB action views without *too* much difficulty. + The implementation relies on FAB internals, but unfortunately I don't see + a better way around it. 
+ + Example usage:: + + from airflow.www.views import TaskInstanceModelView + + ti = session.query(TaskInstance).filter(...).one() + pk = _get_appbuilder_pk_string(TaskInstanceModelView, ti) + client.post("...", data={"action": "...", "rowid": pk}) + """ + pk_value = model_view_cls.datamodel.get_pk_value(instance) + return model_view_cls._serialize_pk_if_composite(model_view_cls, pk_value) + + @pytest.fixture(scope="module") def acl_app(app): security_manager = app.appbuilder.sm @@ -245,6 +273,93 @@ def client_all_dags(acl_app, user_all_dags): ) + +@pytest.fixture +def client_single_dag(app, user_single_dag): + """Client for User that can only access the first DAG from TEST_FILTER_DAG_IDS""" + return client_with_login( + app, + username="user_single_dag", + password="user_single_dag", + ) + + +@pytest.fixture(scope="module") +def client_dr_without_dag_run_create(app): + create_user( + app, + username="all_dr_permissions_except_dag_run_create", + role_name="all_dr_permissions_except_dag_run_create", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DAG_RUN), + ], + ) + + yield client_with_login( + app, + username="all_dr_permissions_except_dag_run_create", + password="all_dr_permissions_except_dag_run_create", + ) + + delete_user(app, username="all_dr_permissions_except_dag_run_create") # type: ignore + delete_roles(app) + + +@pytest.fixture(scope="module") +def client_dr_without_dag_edit(app): + create_user( + app, + username="all_dr_permissions_except_dag_edit", + role_name="all_dr_permissions_except_dag_edit", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DAG_RUN), + ], + ) + + yield client_with_login( + app, + username="all_dr_permissions_except_dag_edit", + password="all_dr_permissions_except_dag_edit", + ) + + delete_user(app, username="all_dr_permissions_except_dag_edit") # type: ignore + delete_roles(app) + + +@pytest.fixture(scope="module") +def user_no_importerror(app): + """Create User that cannot access Import Errors""" + return create_user( + app, + username="user_no_importerrors", + role_name="role_no_importerrors", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + ], + ) + + +@pytest.fixture +def client_no_importerror(app, user_no_importerror): + """Client for User that cannot access Import Errors""" + return client_with_login( + app, + username="user_no_importerrors", + password="user_no_importerrors", + ) + + def test_index_for_all_dag_user(client_all_dags): # The all dag user can access/view all dags.
resp = client_all_dags.get("/", follow_redirects=True) @@ -588,6 +703,34 @@ def client_dags_tis_logs(acl_app, user_dags_tis_logs): ) + +@pytest.fixture(scope="module") +def user_single_dag_edit(app): + """Create User that can edit only a single DAG from TEST_FILTER_DAG_IDS""" + return create_user( + app, + username="user_single_dag_edit", + role_name="role_single_dag", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + ( + permissions.ACTION_CAN_EDIT, + _resource_name("filter_test_1", permissions.RESOURCE_DAG), + ), + ], + ) + + +@pytest.fixture +def client_single_dag_edit(app, user_single_dag_edit): + """Client for User that can only edit the first DAG from TEST_FILTER_DAG_IDS""" + return client_with_login( + app, + username="user_single_dag_edit", + password="user_single_dag_edit", + ) + + RENDERED_TEMPLATES_URL = ( f"rendered-templates?task_id=runme_0&dag_id=example_bash_operator&" f"logical_date={urllib.parse.quote_plus(str(DEFAULT_DATE))}" ) @@ -867,6 +1010,90 @@ def client_anonymous(acl_app): return acl_app.test_client() + +@pytest.fixture +def running_dag_run(session): + dag = DagBag().get_dag("example_bash_operator") + logical_date = timezone.datetime(2016, 1, 9) + dr = dag.create_dagrun( + state="running", + logical_date=logical_date, + data_interval=(logical_date, logical_date), + run_id="test_dag_runs_action", + run_type=DagRunType.MANUAL, + session=session, + run_after=logical_date, + triggered_by=DagRunTriggeredByType.TEST, + ) + session.add(dr) + tis = [ + TaskInstance(dag.get_task("runme_0"), run_id=dr.run_id, state="success"), + TaskInstance(dag.get_task("runme_1"), run_id=dr.run_id, state="failed"), + ] + session.bulk_save_objects(tis) + session.commit() + return dr + + +@pytest.fixture +def _working_dags(dag_maker): + for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): + with dag_maker(dag_id=dag_id, fileloc=f"/{dag_id}.py", tags=[tag]): + # We need to enter+exit the dag maker context for it to create the dag + pass + + +@pytest.fixture +def _working_dags_with_read_perm(dag_maker): + for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): + if dag_id == "filter_test_1": + access_control = {"role_single_dag": {"can_read"}} + else: + access_control = None + + with dag_maker(dag_id=dag_id, fileloc=f"/{dag_id}.py", tags=[tag], access_control=access_control): + pass + + +@pytest.fixture +def _working_dags_with_edit_perm(dag_maker): + for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): + if dag_id == "filter_test_1": + access_control = {"role_single_dag": {"can_edit"}} + else: + access_control = None + + with dag_maker(dag_id=dag_id, fileloc=f"/{dag_id}.py", tags=[tag], access_control=access_control): + pass + + +@pytest.fixture +def _broken_dags(session): + from airflow.models.errors import ParseImportError + + for dag_id in TEST_FILTER_DAG_IDS: + session.add( + ParseImportError( + filename=f"/{dag_id}.py", bundle_name="dag_maker", stacktrace="Some Error\nTraceback:\n" + ) + ) + session.commit() + + +@pytest.fixture +def _broken_dags_after_working(dag_maker, session): + # First create and process a DAG file that works + path = "/all_in_one.py" + for dag_id in TEST_FILTER_DAG_IDS: + with dag_maker(dag_id=dag_id, fileloc=path, session=session): + pass + + # Then create an import error against that file + session.add( + ParseImportError(filename=path, bundle_name="dag_maker", stacktrace="Some Error\nTraceback:\n") + ) + session.commit() + + @pytest.mark.parametrize( "client, url, status_code,
expected_content", [ @@ -951,6 +1178,127 @@ def client_ti_edit_without_dag_level_access(acl_app, user_ti_edit_without_dag_le ) +@pytest.fixture(scope="module", autouse=True) +def _init_blank_dagrun(): + """Make sure there are no runs before we test anything. + + This really shouldn't be needed, but tests elsewhere leave the db dirty. + """ + with create_session() as session: + session.query(DagRun).delete() + session.query(TaskInstance).delete() + + +@pytest.fixture(autouse=True) +def _reset_dagrun(): + yield + with create_session() as session: + session.query(DagRun).delete() + session.query(TaskInstance).delete() + + +@pytest.fixture +def one_dag_perm_user_client(app): + username = "test_user_one_dag_perm" + dag_id = "example_bash_operator" + sm = app.appbuilder.sm + perm = f"{permissions.RESOURCE_DAG_PREFIX}{dag_id}" + + sm.create_permission(permissions.ACTION_CAN_READ, perm) + + create_user( + app, + username=username, + role_name="User with permission to access only one dag", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, perm), + ], + ) + + sm.find_user(username=username) + + yield client_with_login( + app, + username=username, + password=username, + ) + + delete_user(app, username=username) # type: ignore + delete_roles(app) + + +@pytest.fixture +def new_dag_to_delete(testing_dag_bundle): + dag = DAG( + "new_dag_to_delete", is_paused_upon_creation=True, schedule="0 * * * *", start_date=DEFAULT_DATE + ) + session = settings.Session() + DAG.bulk_write_to_db("testing", None, [dag], session=session) + return dag + + +@pytest.fixture +def per_dag_perm_user_client(app, new_dag_to_delete): + sm = app.appbuilder.sm + perm = f"{permissions.RESOURCE_DAG_PREFIX}{new_dag_to_delete.dag_id}" + + sm.create_permission(permissions.ACTION_CAN_DELETE, perm) + + create_user( + app, + username="test_user_per_dag_perms", + role_name="User with some perms", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + (permissions.ACTION_CAN_DELETE, perm), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + ], + ) + + sm.find_user(username="test_user_per_dag_perms") + + yield client_with_login( + app, + username="test_user_per_dag_perms", + password="test_user_per_dag_perms", + ) + + delete_user(app, username="test_user_per_dag_perms") # type: ignore + delete_roles(app) + + +@pytest.fixture(scope="module") +def client_ti_without_dag_edit(app): + create_user( + app, + username="all_ti_permissions_except_dag_edit", + role_name="all_ti_permissions_except_dag_edit", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_TASK_INSTANCE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), + (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE), + (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE), + (permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_TASK_INSTANCE), + (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN), + ], + ) + + yield client_with_login( + app, + username="all_ti_permissions_except_dag_edit", + password="all_ti_permissions_except_dag_edit", + ) + + delete_user(app, username="all_ti_permissions_except_dag_edit") # 
type: ignore + delete_roles(app) + + def test_failure_edit_ti_without_dag_level_access(client_ti_edit_without_dag_level_access): form = dict( task_id="run_this_last", @@ -963,3 +1311,266 @@ def test_failure_edit_ti_without_dag_level_access(client_ti_edit_without_dag_lev ) resp = client_ti_edit_without_dag_level_access.post("/success", data=form, follow_redirects=True) check_content_not_in_response("Marked success on 1 task instances", resp) + + +def test_viewer_cant_trigger_dag(app): + """ + Test that a user with only DAG read access can't trigger DAGs. + """ + with create_test_client( + app, + user_name="test_user", + role_name="test_role", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), + (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN), + ], + ) as client: + url = "dags/example_bash_operator/trigger" + resp = client.get(url, follow_redirects=True) + response_data = resp.data.decode() + assert "Access is Denied" in response_data + + +def test_get_dagrun_can_view_dags_without_edit_perms(session, running_dag_run, client_dr_without_dag_edit): + """Test that a user without dag_edit but with dag_read permission can view the records""" + assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 2 + resp = client_dr_without_dag_edit.get("/dagrun/list/", follow_redirects=True) + check_content_in_response(running_dag_run.dag_id, resp) + + +def test_create_dagrun_permission_denied(session, client_dr_without_dag_run_create): + data = { + "state": "running", + "dag_id": "example_bash_operator", + "logical_date": "2018-07-06 05:06:03", + "run_id": "test_list_dagrun_includes_conf", + "conf": '{"include": "me"}', + } + + resp = client_dr_without_dag_run_create.post("/dagrun/add", data=data, follow_redirects=True) + check_content_in_response("Access is Denied", resp) + + +def test_delete_dagrun_permission_denied(session, running_dag_run, client_dr_without_dag_edit): + composite_key = _get_appbuilder_pk_string(DagRunModelView, running_dag_run) + + assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 2 + resp = client_dr_without_dag_edit.post(f"/dagrun/delete/{composite_key}", follow_redirects=True) + check_content_in_response("Access is Denied", resp) + assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 2 + + +@pytest.mark.parametrize( + "action", + ["clear", "set_success", "set_failed", "set_running"], + ids=["clear", "success", "failed", "running"], +) +def test_set_dag_runs_action_permission_denied(client_dr_without_dag_edit, running_dag_run, action): + running_dag_id = running_dag_run.id + resp = client_dr_without_dag_edit.post( + "/dagrun/action_post", + data={"action": action, "rowid": [str(running_dag_id)]}, + follow_redirects=True, + ) + check_content_in_response("Access is Denied", resp) + + +def test_delete_dagrun(session, admin_client, running_dag_run): + composite_key = _get_appbuilder_pk_string(DagRunModelView, running_dag_run) + assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 2 + admin_client.post(f"/dagrun/delete/{composite_key}", follow_redirects=True) + assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 1 + + +@pytest.mark.usefixtures("_broken_dags", "_working_dags") +def test_home_no_importerrors_perm(_broken_dags, client_no_importerror): + # Users without "can read on import errors" don't see any import errors + resp =
client_no_importerror.get("home", follow_redirects=True) + check_content_not_in_response("Import Errors", resp) + + +TEST_FILTER_DAG_IDS = ["filter_test_1", "filter_test_2", "a_first_dag_id_asc", "filter.test"] +TEST_TAGS = ["example", "test", "team", "group"] + + +@pytest.fixture(scope="module") +def user_single_dag(app): + """Create User that can only access the first DAG from TEST_FILTER_DAG_IDS""" + return create_user( + app, + username="user_single_dag", + role_name="role_single_dag", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR), + ( + permissions.ACTION_CAN_READ, + _resource_name(TEST_FILTER_DAG_IDS[0], permissions.RESOURCE_DAG), + ), + ], + ) + + +@pytest.fixture +def testing_dag_bundle(): + from airflow.models.dagbundle import DagBundleModel + from airflow.utils.session import create_session + + with create_session() as session: + if session.query(DagBundleModel).filter(DagBundleModel.name == "testing").count() == 0: + testing = DagBundleModel(name="testing") + session.add(testing) + + +@pytest.fixture +def client_variable_reader(app, user_variable_reader): + """Client for User that can only read variables""" + return client_with_login( + app, + username="user_variable_reader", + password="user_variable_reader", + ) + + +VARIABLE = { + "key": "test_key", + "val": "text_val", + "description": "test_description", + "is_encrypted": True, +} + + +@pytest.fixture(autouse=True) +def _clear_variables(): + with create_session() as session: + session.query(Variable).delete() + + +@pytest.fixture(scope="module") +def user_variable_reader(app): + """Create User that can only read variables""" + return create_user( + app, + username="user_variable_reader", + role_name="role_variable_reader", + permissions=[ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE), + (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), + ], + ) + + +@pytest.fixture +def variable(session): + variable = Variable( + key=VARIABLE["key"], + val=VARIABLE["val"], + description=VARIABLE["description"], + ) + session.add(variable) + session.commit() + yield variable + session.query(Variable).filter(Variable.key == VARIABLE["key"]).delete() + session.commit() + + +@pytest.mark.parametrize( + "page", + [ + "home", + "home?status=all", + "home?status=active", + "home?status=paused", + "home?lastrun=running", + "home?lastrun=failed", + "home?lastrun=all_states", + ], +) +@pytest.mark.usefixtures("_working_dags_with_read_perm", "_broken_dags") +def test_home_importerrors_filtered_singledag_user(client_single_dag, page): + # Users that can only see certain DAGs get a filtered list of import errors + resp = client_single_dag.get(page, follow_redirects=True) + check_content_in_response("Import Errors", resp) + # They can see the first DAG's import error + check_content_in_response(f"/{TEST_FILTER_DAG_IDS[0]}.py", resp) + check_content_in_response("Traceback", resp) + # But not the rest + for dag_id in TEST_FILTER_DAG_IDS[1:]: + check_content_not_in_response(f"/{dag_id}.py", resp) + + +def test_home_importerrors_missing_read_on_all_dags_in_file(_broken_dags_after_working, client_single_dag): + # If a user doesn't have READ on all DAGs in a file, that file's traceback is redacted + resp = client_single_dag.get("home", follow_redirects=True) + check_content_in_response("Import Errors", resp) + # They can see the DAG file has an import error + check_content_in_response("all_in_one.py",
resp) + # And the traceback is redacted + check_content_not_in_response("Traceback", resp) + check_content_in_response("REDACTED", resp) + + +def test_home_dag_list_filtered_singledag_user(_working_dags_with_read_perm, client_single_dag): + # Users that can only see certain DAGs get a filtered list + resp = client_single_dag.get("home", follow_redirects=True) + # They can see the first DAG + check_content_in_response(f"dag_id={TEST_FILTER_DAG_IDS[0]}", resp) + # But not the rest + for dag_id in TEST_FILTER_DAG_IDS[1:]: + check_content_not_in_response(f"dag_id={dag_id}", resp) + + +def test_home_dag_edit_permissions( + capture_templates, # noqa: F811 + _working_dags_with_edit_perm, + client_single_dag_edit, +): + with capture_templates() as templates: + client_single_dag_edit.get("home", follow_redirects=True) + + dags = templates[0].local_context["dags"] + assert len(dags) > 0 + dag_edit_perm_tuple = [(dag.dag_id, dag.can_edit) for dag in dags] + assert ("filter_test_1", True) in dag_edit_perm_tuple + assert ("filter_test_2", False) in dag_edit_perm_tuple + + +def test_graph_view_without_dag_permission(app, one_dag_perm_user_client): + url = "/dags/example_bash_operator/graph" + resp = one_dag_perm_user_client.get(url, follow_redirects=True) + assert resp.status_code == 200 + assert ( + resp.request.url + == "http://localhost/dags/example_bash_operator/grid?tab=graph&dag_run_id=TEST_RUN_ID" + ) + check_content_in_response("example_bash_operator", resp) + + url = "/dags/example_xcom/graph" + resp = one_dag_perm_user_client.get(url, follow_redirects=True) + assert resp.status_code == 200 + assert resp.request.url == "http://localhost/home" + check_content_in_response("Access is Denied", resp) + + +def test_delete_just_dag_per_dag_permissions(new_dag_to_delete, per_dag_perm_user_client): + resp = per_dag_perm_user_client.post( + f"delete?dag_id={new_dag_to_delete.dag_id}&next=/home", follow_redirects=True + ) + check_content_in_response(f"Deleting DAG with id {new_dag_to_delete.dag_id}.", resp) + + +def test_import_variables_form_hidden(app, client_variable_reader): + resp = client_variable_reader.get("/variable/list/") + check_content_not_in_response("Import Variables", resp) + + +def test_action_muldelete_access_denied(session, client_variable_reader, variable): + var_id = variable.id + resp = client_variable_reader.post( + "/variable/action_post", + data={"action": "muldelete", "rowid": [var_id]}, + follow_redirects=True, + ) + check_content_in_response("Access is Denied", resp) diff --git a/providers/facebook/tests/conftest.py b/providers/facebook/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/facebook/tests/conftest.py +++ b/providers/facebook/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/ftp/tests/conftest.py b/providers/ftp/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/ftp/tests/conftest.py +++ b/providers/ftp/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/ftp/tests/system/__init__.py b/providers/ftp/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/ftp/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/github/tests/conftest.py b/providers/github/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/github/tests/conftest.py +++ b/providers/github/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/github/tests/system/__init__.py b/providers/github/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/github/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/github/tests/system/github/example_github.py b/providers/github/tests/system/github/example_github.py index c3d1d3a4c7e4e..b076647177cc9 100644 --- a/providers/github/tests/system/github/example_github.py +++ b/providers/github/tests/system/github/example_github.py @@ -21,11 +21,12 @@ from datetime import datetime from typing import Any +from github import GithubException + from airflow.exceptions import AirflowException from airflow.models.dag import DAG from airflow.providers.github.operators.github import GithubOperator from airflow.providers.github.sensors.github import GithubSensor, GithubTagSensor -from github import GithubException ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") DAG_ID = "example_github_operator" diff --git a/providers/google/docs/example-dags.rst b/providers/google/docs/example-dags.rst index 15245b948c6c1..6c504998d75c6 100644 --- a/providers/google/docs/example-dags.rst +++ b/providers/google/docs/example-dags.rst @@ -19,10 +19,8 @@ Example DAGs ============ You can learn how to use Google integrations by analyzing the source code of the example DAGs: -* `Google Ads `__ -* `Google Cloud (legacy) `__ -* `Google Cloud `__ -* `Google Firebase `__ -* `Google Marketing Platform `__ -* `Google Workplace `__ (formerly Google Suite) -* `Google LevelDB `__ +* `Google Ads `__ +* `Google Cloud `__ +* `Google Firebase `__ +* `Google Workplace `__ +* `Google LevelDB `__ diff --git a/providers/google/docs/operators/cloud/index.rst b/providers/google/docs/operators/cloud/index.rst index 3daed7a11e9bf..7635148040a1b 100644 --- a/providers/google/docs/operators/cloud/index.rst +++ b/providers/google/docs/operators/cloud/index.rst @@ -29,4 +29,4 @@ Google Cloud Operators .. note:: You can learn how to use Google Cloud integrations by analyzing the - `source code `_ of the particular example DAGs. 
+ `source code `_ of the particular example DAGs. diff --git a/providers/google/pyproject.toml b/providers/google/pyproject.toml index 1eb762d88929a..11ff422d3d03e 100644 --- a/providers/google/pyproject.toml +++ b/providers/google/pyproject.toml @@ -143,7 +143,8 @@ dependencies = [ # Any change in the dependencies is preserved when the file is regenerated [project.optional-dependencies] "apache.beam" = [ - "apache-beam[gcp]", + 'apache-beam[gcp]>=2.53.0; python_version < "3.12"', + 'apache-beam[gcp]>=2.57.0; python_version >= "3.12"', ] "cncf.kubernetes" = [ "apache-airflow-providers-cncf-kubernetes>=10.1.0", diff --git a/providers/google/src/airflow/providers/google/get_provider_info.py b/providers/google/src/airflow/providers/google/get_provider_info.py index 99696addda63a..cc3e9e768e783 100644 --- a/providers/google/src/airflow/providers/google/get_provider_info.py +++ b/providers/google/src/airflow/providers/google/get_provider_info.py @@ -1630,7 +1630,10 @@ def get_provider_info(): "immutabledict>=4.2.0", ], "optional-dependencies": { - "apache.beam": ["apache-beam[gcp]"], + "apache.beam": [ + 'apache-beam[gcp]>=2.53.0; python_version < "3.12"', + 'apache-beam[gcp]>=2.57.0; python_version >= "3.12"', + ], "cncf.kubernetes": ["apache-airflow-providers-cncf-kubernetes>=10.1.0"], "leveldb": ["plyvel>=1.5.1"], "oracle": ["apache-airflow-providers-oracle>=3.1.0"], diff --git a/providers/google/tests/conftest.py b/providers/google/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/google/tests/conftest.py +++ b/providers/google/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/tests/deprecations_ignore.yml b/providers/google/tests/deprecations_ignore.yml similarity index 92% rename from providers/tests/deprecations_ignore.yml rename to providers/google/tests/deprecations_ignore.yml index bce6cbd36f6f2..41439cb443b33 100644 --- a/providers/tests/deprecations_ignore.yml +++ b/providers/google/tests/deprecations_ignore.yml @@ -15,17 +15,6 @@ # specific language governing permissions and limitations # under the License. 
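The `apache-beam` entries added to `pyproject.toml` and `get_provider_info.py` above use PEP 508 environment markers, so a single optional-dependency list carries a different minimum version per Python release. A minimal sketch of how such a requirement string is read, using the `packaging` library (assumed to be installed; this mirrors what a resolver such as pip evaluates):

```python
from packaging.requirements import Requirement

req = Requirement('apache-beam[gcp]>=2.53.0; python_version < "3.12"')
print(req.name, sorted(req.extras), req.specifier)  # apache-beam ['gcp'] >=2.53.0
print(req.marker.evaluate())  # True only when the running interpreter is < 3.12
```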
--- - -- providers/tests/amazon/aws/deferrable/hooks/test_base_aws.py::TestAwsBaseAsyncHook::test_get_client_async -- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_cluster_status -- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status -- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status_exception -- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_pause_cluster -- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster -- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster_exception -- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_exception -- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_resuming_status -- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_success - providers/google/tests/provider_tests/google/common/auth_backend/test_google_openid.py::TestGoogleOpenID::test_success - providers/google/tests/provider_tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs - providers/google/tests/provider_tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs_duplication_warning diff --git a/providers/google/tests/integration/__init__.py b/providers/google/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/google/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/apache/kafka/__init__.py b/providers/google/tests/integration/google/__init__.py similarity index 100% rename from providers/tests/integration/apache/kafka/__init__.py rename to providers/google/tests/integration/google/__init__.py diff --git a/providers/tests/integration/apache/kafka/hooks/__init__.py b/providers/google/tests/integration/google/cloud/__init__.py similarity index 100% rename from providers/tests/integration/apache/kafka/hooks/__init__.py rename to providers/google/tests/integration/google/cloud/__init__.py diff --git a/providers/tests/integration/apache/kafka/operators/__init__.py b/providers/google/tests/integration/google/cloud/transfers/__init__.py similarity index 100% rename from providers/tests/integration/apache/kafka/operators/__init__.py rename to providers/google/tests/integration/google/cloud/transfers/__init__.py diff --git a/providers/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py b/providers/google/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py similarity index 100% rename from providers/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py rename to providers/google/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py diff --git a/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py b/providers/google/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py similarity index 100% rename from providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py rename to providers/google/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py diff --git a/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py b/providers/google/tests/integration/google/cloud/transfers/test_trino_to_gcs.py similarity index 100% rename from providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py rename to providers/google/tests/integration/google/cloud/transfers/test_trino_to_gcs.py diff --git a/providers/google/tests/provider_tests/google/cloud/hooks/test_cloud_sql.py b/providers/google/tests/provider_tests/google/cloud/hooks/test_cloud_sql.py index 2e210faf332fa..1e589e8a04820 100644 --- a/providers/google/tests/provider_tests/google/cloud/hooks/test_cloud_sql.py +++ b/providers/google/tests/provider_tests/google/cloud/hooks/test_cloud_sql.py @@ -1596,6 +1596,8 @@ def get_processor(): processor = os.uname().machine if processor == "x86_64": processor = "amd64" + if processor == "aarch64": + processor = "arm64" return processor @@ -1706,7 +1708,6 @@ async def test_async_get_operation_exception_should_execute_successfully( self, mocked_get_conn, hook_async ): """Assets that the logging is done correctly when CloudSQLAsyncHook raises HttpError""" - mocked_get_conn.side_effect = HttpError( resp=mock.MagicMock(status=409), content=b"Operation already exists" ) diff --git a/providers/google/tests/provider_tests/google/cloud/operators/test_dataprep_system.py b/providers/google/tests/provider_tests/google/cloud/operators/test_dataprep_system.py index 0a3043f35defe..8efba351c625d 100644 --- a/providers/google/tests/provider_tests/google/cloud/operators/test_dataprep_system.py +++ b/providers/google/tests/provider_tests/google/cloud/operators/test_dataprep_system.py @@ -53,7 +53,7 @@ def teardown_method(self): clear_db_connections() def test_run_example_dag(self): - from 
providers.google.tests.provider_tests.google.cloud.dataprep.example_dataprep import dag + from provider_tests.google.cloud.dataprep.example_dataprep import dag run = get_test_run(dag) run() diff --git a/providers/google/tests/system/__init__.py b/providers/google/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/google/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/google/tests/system/google/README.md b/providers/google/tests/system/google/README.md index 4dafa15a1b0bb..95695feb9bdad 100644 --- a/providers/google/tests/system/google/README.md +++ b/providers/google/tests/system/google/README.md @@ -31,19 +31,19 @@ is anything more needed for the test to be executed, it should be documented in Example files structure: ``` -providers/tests/system/google - ├── bigquery - │ ├── resources - │ │ ├── example_bigquery_query.sql - │ │ └── us-states.csv - │ ├── example_bigquery_queries.py - │ ├── example_bigquery_operations.py - . . - │ └── example_bigquery_*.py - ├── dataflow - ├── gcs - . - └── * +providers/google/tests/system/google + ├── bigquery + │ ├── resources + │ │ ├── example_bigquery_query.sql + │ │ └── us-states.csv + │ ├── example_bigquery_queries.py + │ ├── example_bigquery_operations.py + . . + │ └── example_bigquery_*.py + ├── dataflow + ├── gcs + . 
+ └── * ``` ## Initial configuration diff --git a/providers/google/tests/system/google/ads/example_ads.py b/providers/google/tests/system/google/ads/example_ads.py index 9b3fe5e20080f..668efcad7aaf7 100644 --- a/providers/google/tests/system/google/ads/example_ads.py +++ b/providers/google/tests/system/google/ads/example_ads.py @@ -24,13 +24,12 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_google_ads_env_variables] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py b/providers/google/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py index 0156c15b1a40e..ee38cf5e761c5 100644 --- a/providers/google/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py +++ b/providers/google/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py @@ -19,12 +19,11 @@ import os from datetime import datetime, timedelta -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import AzureFileShareToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_dts.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_dts.py index b73d47828020c..58e1411f0fa67 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_dts.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_dts.py @@ -27,9 +27,6 @@ from pathlib import Path from typing import cast -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg @@ -47,6 +44,8 @@ from airflow.providers.google.cloud.sensors.bigquery_dts import BigQueryDataTransferServiceTransferRunSensor from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_jobs.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_jobs.py index 
64765975ab5a0..be7a8bec23155 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_jobs.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_jobs.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -37,6 +34,8 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_operations.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_operations.py index 8e98245d320c2..80f759e4001c2 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_operations.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_operations.py @@ -25,8 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -36,6 +34,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_operations" diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries.py index f1f6f31f3efa7..1494f3ef9e42b 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCheckOperator, @@ -43,6 +40,8 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py index 
19732fdb08217..a68986977d382 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py @@ -25,8 +25,6 @@ from datetime import datetime, timedelta from pathlib import Path -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCheckOperator, @@ -40,6 +38,7 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_location.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_location.py index 57213fcbae7a4..a8983fd8d9e09 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_location.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_queries_location.py @@ -24,8 +24,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCheckOperator, @@ -41,6 +39,7 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_tables.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_tables.py index 627eb0d238197..60668368be316 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_tables.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_tables.py @@ -26,9 +26,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -44,6 +41,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_tables" diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py index 5e0770eb57fd1..22634986577a5 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py @@ -24,8 +24,6 @@ 
import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -35,6 +33,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_to_gcs" diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py index 8ca02fb11173b..64c642cfbad71 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -37,6 +34,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_to_gcs_async" diff --git a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_transfer.py b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_transfer.py index e5f7ddc8d1503..c9c1ea1165cda 100644 --- a/providers/google/tests/system/google/cloud/bigquery/example_bigquery_transfer.py +++ b/providers/google/tests/system/google/cloud/bigquery/example_bigquery_transfer.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -38,6 +35,8 @@ from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/bigtable/example_bigtable.py b/providers/google/tests/system/google/cloud/bigtable/example_bigtable.py index a4dd5f8d7b617..277667f603aa3 100644 --- 
a/providers/google/tests/system/google/cloud/bigtable/example_bigtable.py +++ b/providers/google/tests/system/google/cloud/bigtable/example_bigtable.py @@ -47,7 +47,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.bigtable import enums from airflow.decorators import task_group from airflow.models.dag import DAG @@ -61,7 +61,7 @@ ) from airflow.providers.google.cloud.sensors.bigtable import BigtableTableReplicationCompletedSensor from airflow.utils.trigger_rule import TriggerRule -from google.cloud.bigtable import enums +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/cloud_batch/example_cloud_batch.py b/providers/google/tests/system/google/cloud/cloud_batch/example_cloud_batch.py index ffb4e21096e8b..15942baa97cd0 100644 --- a/providers/google/tests/system/google/cloud/cloud_batch/example_cloud_batch.py +++ b/providers/google/tests/system/google/cloud/cloud_batch/example_cloud_batch.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud import batch_v1 from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.cloud_batch import ( @@ -35,7 +35,7 @@ ) from airflow.providers.standard.operators.python import PythonOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud import batch_v1 +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build.py b/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build.py index 008f149d502a0..2e9b55aeeea3e 100644 --- a/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build.py +++ b/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build.py @@ -27,7 +27,6 @@ from typing import Any, cast import yaml -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID from airflow.decorators import task_group from airflow.models.dag import DAG @@ -40,6 +39,7 @@ CloudBuildRetryBuildOperator, ) from airflow.providers.standard.operators.bash import BashOperator +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py b/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py index 8eb18e8e8b494..42db8f6ad3a4c 100644 --- a/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py +++ b/providers/google/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py @@ -27,7 +27,6 @@ from googleapiclient.discovery import build from googleapiclient.errors import HttpError -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID from airflow.decorators import task from airflow.exceptions import AirflowException @@ -42,6 +41,7 @@ 
CloudBuildUpdateBuildTriggerOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/cloud_functions/example_functions.py b/providers/google/tests/system/google/cloud/cloud_functions/example_functions.py index 24baf2c3e706f..9ce9a3858cbd2 100644 --- a/providers/google/tests/system/google/cloud/cloud_functions/example_functions.py +++ b/providers/google/tests/system/google/cloud/cloud_functions/example_functions.py @@ -27,8 +27,6 @@ from datetime import datetime from typing import Any -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.functions import ( @@ -36,6 +34,7 @@ CloudFunctionDeployFunctionOperator, CloudFunctionInvokeFunctionOperator, ) +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py b/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py index bc2256d029a10..5d425b8b0e3cc 100644 --- a/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +++ b/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py @@ -29,6 +29,8 @@ import os from datetime import datetime +from google.protobuf.field_mask_pb2 import FieldMask + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.cloud_memorystore import ( CloudMemorystoreMemcachedApplyParametersOperator, @@ -41,7 +43,6 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.protobuf.field_mask_pb2 import FieldMask ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "google_project_id") diff --git a/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py b/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py index db7768fea9a0d..c70d69bbf1d42 100644 --- a/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +++ b/providers/google/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.redis_v1 import FailoverInstanceRequest, Instance from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.cloud_memorystore import ( @@ -47,7 +47,7 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.redis_v1 import FailoverInstanceRequest, Instance +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") ENV_ID_LOWER = ENV_ID.lower() if ENV_ID else "" diff --git 
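
The bigtable, cloud_batch, and cloud_memorystore hunks above all apply the same two-part move: the third-party Google client imports (`google.cloud.bigtable`, `google.cloud.batch_v1`, `google.cloud.redis_v1`, `google.protobuf`) move up into the external-packages block, while the removed umbrella `providers.google.tests.system.google` import is replaced by `from system.google import ...` placed after the `airflow.*` imports. A composite sketch of the resulting module header, with names taken from the hunks and the DAG body omitted:

```python
from __future__ import annotations

import os
from datetime import datetime

# Third-party Google client imports now sit with the other external packages ...
from google.cloud.bigtable import enums

# ... the airflow.* imports form their own block ...
from airflow.models.dag import DAG
from airflow.utils.trigger_rule import TriggerRule

# ... and the shared system-test helpers come from the new namespace package
# instead of the removed umbrella "providers" distribution.
from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
```
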
a/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run.py b/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run.py index e5b19851df22d..4aee6882171ed 100644 --- a/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run.py +++ b/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run.py @@ -23,6 +23,9 @@ import os from datetime import datetime +from google.cloud.run_v2 import Job +from google.cloud.run_v2.types import k8s_min + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.cloud_run import ( CloudRunCreateJobOperator, @@ -33,8 +36,6 @@ ) from airflow.providers.standard.operators.python import PythonOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.run_v2 import Job -from google.cloud.run_v2.types import k8s_min ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run_service.py b/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run_service.py index 88e1d63fb8e60..8d87bcbc10033 100644 --- a/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run_service.py +++ b/providers/google/tests/system/google/cloud/cloud_run/example_cloud_run_service.py @@ -24,13 +24,14 @@ import os from datetime import datetime +from google.cloud.run_v2 import Service +from google.cloud.run_v2.types import k8s_min + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.cloud_run import ( CloudRunCreateServiceOperator, CloudRunDeleteServiceOperator, ) -from google.cloud.run_v2 import Service -from google.cloud.run_v2.types import k8s_min PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql.py b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql.py index d48d77260c7bd..7aa4f7bce4d18 100644 --- a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql.py +++ b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql.py @@ -27,8 +27,6 @@ from datetime import datetime from urllib.parse import urlsplit -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg from airflow.providers.google.cloud.operators.cloud_sql import ( @@ -49,6 +47,7 @@ GCSObjectCreateAclEntryOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py index b580d6e8a0908..b79a7af568db7 100644 --- a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py +++ b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py @@ -31,8 +31,6 @@ from typing import Any from googleapiclient import discovery -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator from airflow 
import settings from airflow.decorators import task, task_group @@ -46,6 +44,8 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_iam.py b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_iam.py index 4ba1824449f66..b29c37fa8ed9e 100644 --- a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_iam.py +++ b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_iam.py @@ -32,7 +32,6 @@ from typing import Any from googleapiclient import discovery -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID from airflow import settings from airflow.decorators import task @@ -47,6 +46,7 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py index c4a6e01f035a6..8a5d8e6a5f2a2 100644 --- a/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +++ b/providers/google/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py @@ -34,7 +34,6 @@ from typing import Any from googleapiclient import discovery -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID from airflow import settings from airflow.decorators import task @@ -50,6 +49,7 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/compute/example_compute.py b/providers/google/tests/system/google/cloud/compute/example_compute.py index f49790f1f7db5..06a565cea621e 100644 --- a/providers/google/tests/system/google/cloud/compute/example_compute.py +++ b/providers/google/tests/system/google/cloud/compute/example_compute.py @@ -27,8 +27,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.compute import ( ComputeEngineDeleteInstanceOperator, @@ -41,6 +39,7 @@ ComputeEngineStopInstanceOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/compute/example_compute_igm.py b/providers/google/tests/system/google/cloud/compute/example_compute_igm.py index 3ed7c8ce5bf4e..4afe851a69dfa 100644 --- a/providers/google/tests/system/google/cloud/compute/example_compute_igm.py +++ 
b/providers/google/tests/system/google/cloud/compute/example_compute_igm.py @@ -28,8 +28,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.compute import ( ComputeEngineCopyInstanceTemplateOperator, @@ -40,6 +38,7 @@ ComputeEngineInstanceGroupUpdateManagerTemplateOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/compute/example_compute_ssh.py b/providers/google/tests/system/google/cloud/compute/example_compute_ssh.py index b53bbc8ebcb55..c241c70e4afe7 100644 --- a/providers/google/tests/system/google/cloud/compute/example_compute_ssh.py +++ b/providers/google/tests/system/google/cloud/compute/example_compute_ssh.py @@ -26,8 +26,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook from airflow.providers.google.cloud.operators.compute import ( @@ -36,6 +34,7 @@ ) from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/compute/example_compute_ssh_os_login.py b/providers/google/tests/system/google/cloud/compute/example_compute_ssh_os_login.py index 6397f75224c23..9b49e612615b3 100644 --- a/providers/google/tests/system/google/cloud/compute/example_compute_ssh_os_login.py +++ b/providers/google/tests/system/google/cloud/compute/example_compute_ssh_os_login.py @@ -26,8 +26,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook from airflow.providers.google.cloud.operators.compute import ( @@ -36,6 +34,7 @@ ) from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/compute/example_compute_ssh_parallel.py b/providers/google/tests/system/google/cloud/compute/example_compute_ssh_parallel.py index d389a5e724b9e..512d6e07b1aaf 100644 --- a/providers/google/tests/system/google/cloud/compute/example_compute_ssh_parallel.py +++ b/providers/google/tests/system/google/cloud/compute/example_compute_ssh_parallel.py @@ -26,8 +26,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook from airflow.providers.google.cloud.operators.compute import ( @@ -36,6 +34,7 @@ ) from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import 
TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py index 3e4259e3ae8f0..fbcccbe86483e 100644 --- a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +++ b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py @@ -25,7 +25,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.dlp_v2.types import ContentItem, DeidentifyTemplate, InspectConfig from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dlp import ( @@ -38,7 +38,7 @@ CloudDLPUpdateDeidentifyTemplateOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.dlp_v2.types import ContentItem, DeidentifyTemplate, InspectConfig +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_deidentify_content" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py index 0d06854dfef1f..a7e414b130734 100644 --- a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py +++ b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py @@ -26,7 +26,8 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.dlp_v2 import StoredInfoTypeConfig +from google.cloud.dlp_v2.types import ContentItem, InspectConfig, InspectTemplate from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dlp import ( @@ -40,8 +41,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.dlp_v2 import StoredInfoTypeConfig -from google.cloud.dlp_v2.types import ContentItem, InspectConfig, InspectTemplate +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_info_types" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py index 04431ad660e76..44cf467999791 100644 --- a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +++ b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py @@ -25,7 +25,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.dlp_v2.types import ContentItem, InspectConfig, InspectTemplate from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dlp import ( @@ -37,7 +37,7 @@ CloudDLPUpdateInspectTemplateOperator, ) from airflow.utils.trigger_rule import 
TriggerRule -from google.cloud.dlp_v2.types import ContentItem, InspectConfig, InspectTemplate +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_inspect_template" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py index ec83b945ed9a6..a1409217b43ee 100644 --- a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py +++ b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py @@ -25,7 +25,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.dlp_v2.types import InspectConfig, InspectJobConfig from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dlp import ( @@ -36,7 +36,7 @@ CloudDLPListDLPJobsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.dlp_v2.types import InspectConfig, InspectJobConfig +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_job" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py index 0a970f818257a..d6a7238461e63 100644 --- a/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +++ b/providers/google/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py @@ -25,8 +25,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dlp import ( CloudDLPCreateJobTriggerOperator, @@ -36,6 +34,7 @@ CloudDLPUpdateJobTriggerOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_job_trigger" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_template.py b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_template.py index 46cdfba44fd00..2d6947461e812 100644 --- a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_template.py +++ b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_template.py @@ -28,8 +28,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataflow import ( @@ -39,6 +37,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_yaml.py 
b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_yaml.py index b9c5a6f6d797f..f5b42bdb64d82 100644 --- a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_yaml.py +++ b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_yaml.py @@ -29,8 +29,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus from airflow.providers.google.cloud.operators.bigquery import ( @@ -41,6 +39,7 @@ ) from airflow.providers.google.cloud.operators.dataflow import DataflowStartYamlJobOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/dataform/example_dataform.py b/providers/google/tests/system/google/cloud/dataform/example_dataform.py index 994cbab105783..2819866bdb93e 100644 --- a/providers/google/tests/system/google/cloud/dataform/example_dataform.py +++ b/providers/google/tests/system/google/cloud/dataform/example_dataform.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.dataform_v1beta1 import WorkflowInvocation, WorkflowInvocationAction from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import BigQueryDeleteDatasetOperator @@ -51,7 +51,7 @@ ) from airflow.providers.google.cloud.utils.dataform import make_initialization_workspace_flow from airflow.utils.trigger_rule import TriggerRule -from google.cloud.dataform_v1beta1 import WorkflowInvocation, WorkflowInvocationAction +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/datafusion/example_datafusion.py b/providers/google/tests/system/google/cloud/datafusion/example_datafusion.py index 0131ed6c2b87d..239965b283088 100644 --- a/providers/google/tests/system/google/cloud/datafusion/example_datafusion.py +++ b/providers/google/tests/system/google/cloud/datafusion/example_datafusion.py @@ -23,8 +23,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.decorators import task from airflow.models.dag import DAG from airflow.providers.google.cloud.hooks.datafusion import DataFusionHook @@ -43,6 +41,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.sensors.datafusion import CloudDataFusionPipelineStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_data_fusion_env_variables] SERVICE_ACCOUNT = os.environ.get("GCP_DATAFUSION_SERVICE_ACCOUNT") diff --git a/providers/google/tests/system/google/cloud/dataplex/example_dataplex.py b/providers/google/tests/system/google/cloud/dataplex/example_dataplex.py index 5c719a84fb163..1b2ebd1e364af 100644 --- a/providers/google/tests/system/google/cloud/dataplex/example_dataplex.py +++ 
b/providers/google/tests/system/google/cloud/dataplex/example_dataplex.py @@ -23,8 +23,6 @@ import datetime import os -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataplex import ( @@ -42,6 +40,7 @@ ) from airflow.providers.google.cloud.sensors.dataplex import DataplexTaskStateSensor from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataplex/example_dataplex_catalog.py b/providers/google/tests/system/google/cloud/dataplex/example_dataplex_catalog.py index 9c5da7533ebac..9c86c55f519f1 100644 --- a/providers/google/tests/system/google/cloud/dataplex/example_dataplex_catalog.py +++ b/providers/google/tests/system/google/cloud/dataplex/example_dataplex_catalog.py @@ -23,8 +23,6 @@ import datetime import os -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataplex import ( DataplexCatalogCreateAspectTypeOperator, @@ -44,6 +42,7 @@ DataplexCatalogUpdateEntryTypeOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dp.py b/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dp.py index 730eff97b0ce1..beea45d2f4fbf 100644 --- a/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dp.py +++ b/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dp.py @@ -23,7 +23,9 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud import dataplex_v1 +from google.cloud.dataplex_v1 import DataProfileSpec +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -48,9 +50,7 @@ ) from airflow.providers.google.cloud.sensors.dataplex import DataplexDataProfileJobStatusSensor from airflow.utils.trigger_rule import TriggerRule -from google.cloud import dataplex_v1 -from google.cloud.dataplex_v1 import DataProfileSpec -from google.protobuf.field_mask_pb2 import FieldMask +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dq.py b/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dq.py index 64582059c0f2f..ba1f7e3a175fa 100644 --- a/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dq.py +++ b/providers/google/tests/system/google/cloud/dataplex/example_dataplex_dq.py @@ -23,7 +23,9 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud import dataplex_v1 +from google.cloud.dataplex_v1 
import DataQualitySpec +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -48,9 +50,7 @@ ) from airflow.providers.google.cloud.sensors.dataplex import DataplexDataQualityJobStatusSensor from airflow.utils.trigger_rule import TriggerRule -from google.cloud import dataplex_v1 -from google.cloud.dataplex_v1 import DataQualitySpec -from google.protobuf.field_mask_pb2 import FieldMask +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataprep/example_dataprep.py b/providers/google/tests/system/google/cloud/dataprep/example_dataprep.py index 9239bc6daddea..5a1dcc1ef7442 100644 --- a/providers/google/tests/system/google/cloud/dataprep/example_dataprep.py +++ b/providers/google/tests/system/google/cloud/dataprep/example_dataprep.py @@ -30,8 +30,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow import models from airflow.decorators import task from airflow.models import Connection @@ -49,6 +47,7 @@ from airflow.providers.google.cloud.sensors.dataprep import DataprepJobGroupIsFinishedSensor from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataprep" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch.py index cae98ff97f097..489c6c68b73a3 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch.py @@ -23,7 +23,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -35,7 +35,7 @@ ) from airflow.providers.google.cloud.sensors.dataproc import DataprocBatchSensor from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py index d9f70862df075..7ff0f7513b8f4 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py @@ -25,7 +25,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ DataprocGetBatchOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from 
system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_batch_deferrable" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py index ef020e72f4ade..184ea1d322ef6 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py @@ -23,7 +23,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -35,7 +35,7 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_batch_ps" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py index fef26e6b3bd2e..67534dc1e4311 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ DataprocStopClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_create_existing_stopped_cluster" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py index 2e9632f7aa16c..250543498c38b 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocUpdateClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_cluster_def" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py index 5cfa440551fba..83b4c122bcba1 100644 --- 
a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocDiagnoseClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_diagnose_cluster" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py index 7df61084b9047..02912fd43ff1e 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py @@ -25,7 +25,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -39,7 +39,7 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_cluster_generation" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py index 2fb61b116c723..965eaa03e2ae7 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ DataprocStopClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_cluster_start_stop" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py index f45b7b9a5dc30..f1138cfcfc72c 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocUpdateClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from 
google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_update" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_flink.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_flink.py index 86cd7071af114..927e5a6866f26 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_flink.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_flink.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_flink" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_gke.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_gke.py index 31853af81bc69..565fae8246d91 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_gke.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_gke.py @@ -31,7 +31,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -43,7 +43,7 @@ GKEDeleteClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_gke" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py index 957fb62ed4423..b4add2ffc48a4 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_hadoop" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hive.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hive.py index d0874e153129b..4701c3fca30f8 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hive.py +++ 
b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_hive.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_hive" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pig.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pig.py index 84ab758fa9e6e..350afbabbd32d 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pig.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pig.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_pig" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_presto.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_presto.py index 0a78dd0446c73..9167ca47e6561 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_presto.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_presto.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_presto" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py index b6dcde90a307c..677ce2a09a6a1 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -38,7 +38,7 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_pyspark" diff --git 
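
Every dataproc example in this stretch swaps the removed umbrella import for `from google.api_core.retry import Retry` at the top of the file; the `Retry` object itself is consumed outside the hunks shown here, typically as the operators' `result_retry` argument. A hedged sketch of that usage; the retry parameters and batch payload below are illustrative, not taken from this patch:

```python
import os

from google.api_core.retry import Retry

from airflow.providers.google.cloud.operators.dataproc import DataprocCreateBatchOperator

PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "example-project")

# result_retry controls how the operator retries polling the Dataproc API for
# the batch result; the numbers here are illustrative only.
create_batch = DataprocCreateBatchOperator(
    task_id="create_batch",
    project_id=PROJECT_ID,
    region="europe-west1",
    batch={"spark_batch": {"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]}},
    batch_id="example-batch",
    result_retry=Retry(maximum=10.0, initial=10.0, multiplier=1.0),
)
```
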
a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark.py index fec36bf570c90..826e6a041e13e 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py index 499f42dd32bb9..58e533dfb2c5a 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ ) from airflow.providers.google.cloud.sensors.dataproc import DataprocJobSensor from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_async" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py index db53fa7bf76ef..4a23b03296213 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py @@ -25,7 +25,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -34,7 +34,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_deferrable" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py index c4e48ef625e02..dc01b616b40b0 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry 
from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_sql" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py index dfba6c875f4ee..a9cd93c64ada2 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -38,7 +38,7 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_sparkr" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_trino.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_trino.py index 315a2854ba4e1..2bb221a0abbe3 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_trino.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_trino.py @@ -24,7 +24,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.retry import Retry from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( @@ -33,7 +33,7 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_trino" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow.py index c2e5624f7119f..a38af175b4b14 100644 --- a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow.py @@ -23,14 +23,13 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( DataprocCreateWorkflowTemplateOperator, DataprocInstantiateInlineWorkflowTemplateOperator, DataprocInstantiateWorkflowTemplateOperator, ) +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_workflow" diff --git a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py index ebfb6e9af9245..f1311e4684db8 100644 --- 
a/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py +++ b/providers/google/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py @@ -23,14 +23,13 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc import ( DataprocCreateWorkflowTemplateOperator, DataprocInstantiateInlineWorkflowTemplateOperator, DataprocInstantiateWorkflowTemplateOperator, ) +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_workflow_def" diff --git a/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py b/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py index c81a220184d35..fec530b9c7cbb 100644 --- a/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py +++ b/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py @@ -25,7 +25,7 @@ import datetime import os -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc_metastore import ( @@ -42,7 +42,7 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.protobuf.field_mask_pb2 import FieldMask +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_metastore" PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py b/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py index d2c58bc7166c3..5300c6c2186c9 100644 --- a/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +++ b/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py @@ -25,8 +25,6 @@ import datetime import os -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.dataproc_metastore import ( DataprocMetastoreCreateBackupOperator, @@ -37,6 +35,7 @@ DataprocMetastoreRestoreServiceOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_metastore_backup" diff --git a/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py b/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py index 12173a10ca76c..17f1ecf80f266 100644 --- a/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py +++ b/providers/google/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py @@ -26,8 +26,6 @@ import datetime import os -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.decorators import task from airflow.models.dag import DAG from airflow.providers.google.cloud.hooks.gcs import _parse_gcs_url @@ 
-44,6 +42,7 @@ from airflow.providers.google.cloud.sensors.dataproc_metastore import MetastoreHivePartitionSensor from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "hive_partition_sensor" PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/datastore/example_datastore_commit.py b/providers/google/tests/system/google/cloud/datastore/example_datastore_commit.py index 8a0751ff2e388..9921fb30f6953 100644 --- a/providers/google/tests/system/google/cloud/datastore/example_datastore_commit.py +++ b/providers/google/tests/system/google/cloud/datastore/example_datastore_commit.py @@ -25,8 +25,6 @@ from datetime import datetime from typing import Any -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.datastore import ( @@ -40,6 +38,7 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/datastore/example_datastore_query.py b/providers/google/tests/system/google/cloud/datastore/example_datastore_query.py index 531449962749e..025796a1722cd 100644 --- a/providers/google/tests/system/google/cloud/datastore/example_datastore_query.py +++ b/providers/google/tests/system/google/cloud/datastore/example_datastore_query.py @@ -25,14 +25,13 @@ from datetime import datetime from typing import Any -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.datastore import ( CloudDatastoreAllocateIdsOperator, CloudDatastoreBeginTransactionOperator, CloudDatastoreRunQueryOperator, ) +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/datastore/example_datastore_rollback.py b/providers/google/tests/system/google/cloud/datastore/example_datastore_rollback.py index ddbd772052d2a..a46d02bb64918 100644 --- a/providers/google/tests/system/google/cloud/datastore/example_datastore_rollback.py +++ b/providers/google/tests/system/google/cloud/datastore/example_datastore_rollback.py @@ -25,13 +25,12 @@ from datetime import datetime from typing import Any -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.datastore import ( CloudDatastoreBeginTransactionOperator, CloudDatastoreRollbackOperator, ) +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_acl.py 
b/providers/google/tests/system/google/cloud/gcs/example_gcs_acl.py index 3cd8a3fb11568..ad96ace96b7d2 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_acl.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_acl.py @@ -24,8 +24,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSBucketCreateAclEntryOperator, @@ -35,6 +33,7 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_copy_delete.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_copy_delete.py index d35330e411912..1ee4fc5c98cb8 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_copy_delete.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_copy_delete.py @@ -26,9 +26,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( @@ -40,6 +37,8 @@ from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_sensor.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_sensor.py index 9f3de9680f455..4529eea221344 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_sensor.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_sensor.py @@ -24,8 +24,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator @@ -37,6 +35,7 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py index c9e3ce566be18..e5adf82a8c463 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py 
@@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -35,6 +32,8 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "gcs_to_bigquery_operator" diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py index 95a397b2a59c2..5b5d67161e9ae 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -35,6 +32,8 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gcs.py index 71f5034a2b9b5..559af7ccff13b 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gcs.py @@ -27,9 +27,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -44,6 +41,8 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.standard.operators.python import PythonOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py index 55564d441b772..6e17ba7263ab9 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py @@ 
-30,8 +30,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG @@ -41,6 +39,7 @@ from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_sftp.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_sftp.py index 607858cadd3e7..4be0d2ae0b0fe 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_to_sftp.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_to_sftp.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator @@ -35,6 +32,8 @@ from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_transform.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_transform.py index a461a092a2ba1..e2503e0ea52db 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_transform.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_transform.py @@ -25,9 +25,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -36,6 +33,8 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py index 117cf35c74236..7da375b960ae0 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py @@ -25,9 +25,6 @@ from datetime 
import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( @@ -37,6 +34,8 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gcs_upload_download.py b/providers/google/tests/system/google/cloud/gcs/example_gcs_upload_download.py index 8fce694fa0ddc..aab8072685db3 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gcs_upload_download.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gcs_upload_download.py @@ -25,14 +25,13 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py index b9c4fbfb18c1c..efaac8717bcc1 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py @@ -22,8 +22,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG @@ -35,6 +33,7 @@ from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_mssql_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_mssql_to_gcs.py index 92834d54cbc46..838ec9fd1fd3c 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_mssql_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_mssql_to_gcs.py @@ -21,11 +21,11 @@ from pathlib import Path import pytest -from 
providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator try: from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator diff --git a/providers/google/tests/system/google/cloud/gcs/example_mysql_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_mysql_to_gcs.py index f0f4f494fb607..d9deeaf3a348d 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_mysql_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_mysql_to_gcs.py @@ -31,7 +31,6 @@ from pathlib import Path import pytest -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator from airflow.decorators import task from airflow.models import Connection @@ -51,6 +50,7 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule +from system.openlineage.operator import OpenLineageTestOperator try: from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator diff --git a/providers/google/tests/system/google/cloud/gcs/example_oracle_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_oracle_to_gcs.py index 7699dc7c8333c..bba7a2be2eedc 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_oracle_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_oracle_to_gcs.py @@ -19,12 +19,11 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.oracle_to_gcs import OracleToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_s3_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_s3_to_gcs.py index 519f26f329399..e35ca345945b6 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_s3_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_s3_to_gcs.py @@ -20,9 +20,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.decorators import task from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.s3 import S3Hook @@ -31,6 +28,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = 
os.environ.get("SYSTEM_TESTS_ENV_ID", "default") GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_sftp_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_sftp_to_gcs.py index 734dd22cff048..7c65605926e0a 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_sftp_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_sftp_to_gcs.py @@ -25,15 +25,14 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/gcs/example_trino_to_gcs.py b/providers/google/tests/system/google/cloud/gcs/example_trino_to_gcs.py index 18746c2d753ba..1241cc542a17e 100644 --- a/providers/google/tests/system/google/cloud/gcs/example_trino_to_gcs.py +++ b/providers/google/tests/system/google/cloud/gcs/example_trino_to_gcs.py @@ -26,8 +26,6 @@ from datetime import datetime from pathlib import Path -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, @@ -38,6 +36,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.trino_to_gcs import TrinoToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.openlineage.operator import OpenLineageTestOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "example_trino_to_gcs" diff --git a/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py b/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py index 91c444632245e..c748c088a1063 100644 --- a/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py +++ b/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py @@ -24,8 +24,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.kubernetes_engine import ( GKECreateClusterOperator, @@ -34,6 +32,7 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "kubernetes_engine" diff --git 
a/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py b/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py index 743017ca12b4d..216459785b568 100644 --- a/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +++ b/providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py @@ -24,8 +24,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.kubernetes_engine import ( GKECreateClusterOperator, @@ -34,6 +32,7 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "kubernetes_engine_async" diff --git a/providers/google/tests/system/google/cloud/life_sciences/example_life_sciences.py b/providers/google/tests/system/google/cloud/life_sciences/example_life_sciences.py index 427fb51588779..458d1ec06a95a 100644 --- a/providers/google/tests/system/google/cloud/life_sciences/example_life_sciences.py +++ b/providers/google/tests/system/google/cloud/life_sciences/example_life_sciences.py @@ -21,14 +21,13 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.life_sciences import LifeSciencesRunPipelineOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/ml_engine/example_mlengine.py b/providers/google/tests/system/google/cloud/ml_engine/example_mlengine.py index 4b70359ee6788..a735d3015ab55 100644 --- a/providers/google/tests/system/google/cloud/ml_engine/example_mlengine.py +++ b/providers/google/tests/system/google/cloud/ml_engine/example_mlengine.py @@ -24,6 +24,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow import models from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -50,9 +54,6 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/natural_language/example_natural_language.py b/providers/google/tests/system/google/cloud/natural_language/example_natural_language.py index 2ef2941814355..4eb542d9aecce 100644 --- 
a/providers/google/tests/system/google/cloud/natural_language/example_natural_language.py +++ b/providers/google/tests/system/google/cloud/natural_language/example_natural_language.py @@ -24,6 +24,8 @@ import os from datetime import datetime +from google.cloud.language_v1 import Document + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.natural_language import ( CloudNaturalLanguageAnalyzeEntitiesOperator, @@ -32,7 +34,6 @@ CloudNaturalLanguageClassifyTextOperator, ) from airflow.providers.standard.operators.bash import BashOperator -from google.cloud.language_v1 import Document ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "gcp_natural_language" diff --git a/providers/google/tests/system/google/cloud/spanner/example_spanner.py b/providers/google/tests/system/google/cloud/spanner/example_spanner.py index 62dc44d770d25..aa719c73b459a 100644 --- a/providers/google/tests/system/google/cloud/spanner/example_spanner.py +++ b/providers/google/tests/system/google/cloud/spanner/example_spanner.py @@ -24,8 +24,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.spanner import ( SpannerDeleteDatabaseInstanceOperator, @@ -36,6 +34,7 @@ SpannerUpdateDatabaseInstanceOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/speech_to_text/example_speech_to_text.py b/providers/google/tests/system/google/cloud/speech_to_text/example_speech_to_text.py index 00771fe456538..a6af70d64a962 100644 --- a/providers/google/tests/system/google/cloud/speech_to_text/example_speech_to_text.py +++ b/providers/google/tests/system/google/cloud/speech_to_text/example_speech_to_text.py @@ -20,14 +20,14 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.speech_to_text import CloudSpeechToTextRecognizeSpeechOperator from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/stackdriver/example_stackdriver.py b/providers/google/tests/system/google/cloud/stackdriver/example_stackdriver.py index 4815675d8c80b..8e6e7e45b5922 100644 --- a/providers/google/tests/system/google/cloud/stackdriver/example_stackdriver.py +++ b/providers/google/tests/system/google/cloud/stackdriver/example_stackdriver.py @@ -25,8 +25,6 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.baseoperator import chain from 
airflow.models.dag import DAG from airflow.providers.google.cloud.operators.stackdriver import ( @@ -42,6 +40,7 @@ StackdriverUpsertNotificationChannelOperator, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py b/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py index 30bd9c75211a0..9e79be17d5e80 100644 --- a/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +++ b/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py @@ -25,8 +25,6 @@ from copy import deepcopy from datetime import datetime, timedelta, timezone -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator @@ -64,6 +62,7 @@ CloudDataTransferServiceJobStatusSensor, ) from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/tasks/example_queue.py b/providers/google/tests/system/google/cloud/tasks/example_queue.py index e10efbf668da2..614bdfd352073 100644 --- a/providers/google/tests/system/google/cloud/tasks/example_queue.py +++ b/providers/google/tests/system/google/cloud/tasks/example_queue.py @@ -28,6 +28,10 @@ import os from datetime import datetime +from google.api_core.retry import Retry +from google.cloud.tasks_v2.types import Queue +from google.protobuf.field_mask_pb2 import FieldMask + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -43,9 +47,6 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry -from google.cloud.tasks_v2.types import Queue -from google.protobuf.field_mask_pb2 import FieldMask ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "cloud_tasks_queue" diff --git a/providers/google/tests/system/google/cloud/tasks/example_tasks.py b/providers/google/tests/system/google/cloud/tasks/example_tasks.py index 51cd2b4758087..f4ab2f53b0bb4 100644 --- a/providers/google/tests/system/google/cloud/tasks/example_tasks.py +++ b/providers/google/tests/system/google/cloud/tasks/example_tasks.py @@ -25,6 +25,10 @@ import os from datetime import datetime, timedelta +from google.api_core.retry import Retry +from google.cloud.tasks_v2.types import Queue +from google.protobuf import timestamp_pb2 + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -38,9 +42,6 @@ CloudTasksTasksListOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry -from google.cloud.tasks_v2.types import Queue -from google.protobuf import timestamp_pb2 
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "cloud_tasks_tasks" diff --git a/providers/google/tests/system/google/cloud/text_to_speech/example_text_to_speech.py b/providers/google/tests/system/google/cloud/text_to_speech/example_text_to_speech.py index 7c8cfeffa4c1d..e922670a54d93 100644 --- a/providers/google/tests/system/google/cloud/text_to_speech/example_text_to_speech.py +++ b/providers/google/tests/system/google/cloud/text_to_speech/example_text_to_speech.py @@ -20,12 +20,11 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/transfers/example_gcs_to_sftp.py b/providers/google/tests/system/google/cloud/transfers/example_gcs_to_sftp.py index 30ad3805e36f2..5908b653ab509 100644 --- a/providers/google/tests/system/google/cloud/transfers/example_gcs_to_sftp.py +++ b/providers/google/tests/system/google/cloud/transfers/example_gcs_to_sftp.py @@ -25,14 +25,13 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.providers.sftp.sensors.sftp import SFTPSensor from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/transfers/example_gdrive_to_local.py b/providers/google/tests/system/google/cloud/transfers/example_gdrive_to_local.py index fac51d520fa20..9705c0749ddad 100644 --- a/providers/google/tests/system/google/cloud/transfers/example_gdrive_to_local.py +++ b/providers/google/tests/system/google/cloud/transfers/example_gdrive_to_local.py @@ -29,8 +29,6 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG @@ -42,6 +40,7 @@ from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator from airflow.settings import Session, json from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/cloud/translate_speech/example_translate_speech.py 
b/providers/google/tests/system/google/cloud/translate_speech/example_translate_speech.py index 97645baf78b9b..2e023d68ae537 100644 --- a/providers/google/tests/system/google/cloud/translate_speech/example_translate_speech.py +++ b/providers/google/tests/system/google/cloud/translate_speech/example_translate_speech.py @@ -20,12 +20,13 @@ import os from datetime import datetime +from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator from airflow.providers.google.cloud.operators.translate_speech import CloudTranslateSpeechOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py index 45a1110a6e659..e22622dd58605 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py @@ -26,6 +26,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import ( CreateAutoMLForecastingTrainingJobOperator, @@ -36,9 +40,6 @@ DeleteDatasetOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_object_detection.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_object_detection.py index 80a6be2ad6898..7a0796c6798c4 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_object_detection.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_object_detection.py @@ -24,6 +24,9 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import ( CreateAutoMLImageTrainingJobOperator, @@ -35,8 +38,6 @@ ImportDataOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py index 
6aa45eac5a1cd..77629b0bc78c1 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py @@ -26,6 +26,9 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import ( CreateAutoMLImageTrainingJobOperator, @@ -37,8 +40,6 @@ ImportDataOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py index 345a54cf8ffe6..106e622abad5e 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py @@ -26,6 +26,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import ( CreateAutoMLTabularTrainingJobOperator, @@ -36,9 +40,6 @@ DeleteDatasetOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_tracking.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_tracking.py index 03d36f947d5e1..38d1e72024d4e 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_tracking.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_tracking.py @@ -24,6 +24,9 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -40,8 +43,6 @@ ImportDataOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py index de17313ebac4a..92133b8271889 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py @@ -26,6 +26,9 @@ import os from datetime 
import datetime +from google.cloud.aiplatform import schema +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import ( CreateAutoMLVideoTrainingJobOperator, @@ -37,8 +40,6 @@ ImportDataOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py index 8a899a6ce84db..06a56f379654f 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py @@ -26,6 +26,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -45,9 +49,6 @@ DeleteDatasetOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py index df5bb0887ca0b..316e0d3a7cbbe 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py @@ -24,6 +24,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -39,9 +43,6 @@ DeleteDatasetOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py index 5fb0c7c5975e7..22c3510d1023c 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py @@ -24,6 +24,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( 
@@ -41,9 +45,6 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py index 35f714b2f54d1..f1b471d446262 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py @@ -24,6 +24,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -39,9 +43,6 @@ DeleteDatasetOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py index 3b4f3b1ddfe72..af4fbb88c2b90 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py @@ -26,6 +26,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -41,9 +45,6 @@ UpdateDatasetOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py index 8f8bb91e816cd..13e24b869c79e 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py @@ -26,6 +26,9 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import ( CreateAutoMLImageTrainingJobOperator, @@ -44,8 +47,6 @@ UndeployModelOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.struct_pb2 import Value ENV_ID = 
os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py index 2990095e5de6f..2a3721d0d0a67 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py @@ -26,6 +26,8 @@ import os from datetime import datetime +from google.cloud import aiplatform + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job import ( @@ -35,7 +37,6 @@ ListHyperparameterTuningJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud import aiplatform ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py index 9383277747d1a..e29cc4ea198a5 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py @@ -26,6 +26,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -54,9 +58,6 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py index 82633fc622696..bc97ff6a58be3 100644 --- a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +++ b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py @@ -26,6 +26,10 @@ import os from datetime import datetime +from google.cloud.aiplatform import schema +from google.protobuf.json_format import ParseDict +from google.protobuf.struct_pb2 import Value + from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -42,9 +46,6 @@ RunPipelineJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from google.cloud.aiplatform import schema -from google.protobuf.json_format import ParseDict -from google.protobuf.struct_pb2 import Value ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git 
a/providers/google/tests/system/google/cloud/video_intelligence/example_video_intelligence.py b/providers/google/tests/system/google/cloud/video_intelligence/example_video_intelligence.py index 4527bb856f186..21ee27cd9be76 100644 --- a/providers/google/tests/system/google/cloud/video_intelligence/example_video_intelligence.py +++ b/providers/google/tests/system/google/cloud/video_intelligence/example_video_intelligence.py @@ -29,6 +29,8 @@ import os from datetime import datetime +from google.api_core.retry import Retry + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator @@ -40,7 +42,6 @@ from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.api_core.retry import Retry ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/providers/google/tests/system/google/cloud/vision/example_vision_annotate_image.py b/providers/google/tests/system/google/cloud/vision/example_vision_annotate_image.py index f9f85f0aefba1..16c6cdb891141 100644 --- a/providers/google/tests/system/google/cloud/vision/example_vision_annotate_image.py +++ b/providers/google/tests/system/google/cloud/vision/example_vision_annotate_image.py @@ -41,7 +41,7 @@ # [START howto_operator_vision_enums_import] from google.cloud.vision_v1 import Feature # isort:skip -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_enums_import] diff --git a/providers/google/tests/system/google/cloud/vision/example_vision_autogenerated.py b/providers/google/tests/system/google/cloud/vision/example_vision_autogenerated.py index de442a462006d..9b52473f5456f 100644 --- a/providers/google/tests/system/google/cloud/vision/example_vision_autogenerated.py +++ b/providers/google/tests/system/google/cloud/vision/example_vision_autogenerated.py @@ -59,7 +59,7 @@ # [END howto_operator_vision_reference_image_import] # [START howto_operator_vision_enums_import] from google.cloud.vision_v1 import Feature # isort:skip -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_enums_import] diff --git a/providers/google/tests/system/google/cloud/vision/example_vision_explicit.py b/providers/google/tests/system/google/cloud/vision/example_vision_explicit.py index 7b035b3474288..3578e77608c27 100644 --- a/providers/google/tests/system/google/cloud/vision/example_vision_explicit.py +++ b/providers/google/tests/system/google/cloud/vision/example_vision_explicit.py @@ -54,7 +54,7 @@ # [END howto_operator_vision_product_import_2] # [START howto_operator_vision_reference_image_import_2] from google.cloud.vision_v1.types import ReferenceImage # isort:skip -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_reference_image_import_2] diff --git a/providers/google/tests/system/google/cloud/workflows/example_workflows.py b/providers/google/tests/system/google/cloud/workflows/example_workflows.py index 3dfa41003adfa..b933d75a6f2b6 100644 --- a/providers/google/tests/system/google/cloud/workflows/example_workflows.py 
+++ b/providers/google/tests/system/google/cloud/workflows/example_workflows.py @@ -20,6 +20,8 @@ from datetime import datetime from typing import cast +from google.protobuf.field_mask_pb2 import FieldMask + from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg from airflow.providers.google.cloud.operators.workflows import ( @@ -35,7 +37,6 @@ ) from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor from airflow.utils.trigger_rule import TriggerRule -from google.protobuf.field_mask_pb2 import FieldMask ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/providers/google/tests/system/google/conftest.py b/providers/google/tests/system/google/conftest.py index a43620d669f2d..1c377b7a34880 100644 --- a/providers/google/tests/system/google/conftest.py +++ b/providers/google/tests/system/google/conftest.py @@ -18,7 +18,7 @@ import pytest -from providers.tests.system.openlineage.conftest import set_transport_variable # noqa: F401 +from system.openlineage.conftest import set_transport_variable # noqa: F401 REQUIRED_ENV_VARS = ("SYSTEM_TESTS_GCP_PROJECT",) diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py index d3e0835fe8903..cc6858a6d52c2 100644 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py +++ b/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py @@ -20,7 +20,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg @@ -37,7 +37,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.protobuf.field_mask_pb2 import FieldMask +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py index 5a290220bc04a..cf103e54f625c 100644 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py +++ b/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py @@ -21,7 +21,7 @@ from datetime import datetime from typing import cast -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.datacatalog import TagField, TagTemplateField from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg @@ -39,7 +39,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.datacatalog import TagField, TagTemplateField +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = 
os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py index c19807eb44508..c63b6b3438526 100644 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py +++ b/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py @@ -20,7 +20,7 @@ import os from datetime import datetime -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.datacatalog import FieldType, TagTemplateField from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg @@ -36,7 +36,7 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.datacatalog import FieldType, TagTemplateField +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py index 2d3b6d38cd46e..60bc2eb057205 100644 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py +++ b/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py @@ -21,7 +21,7 @@ from datetime import datetime from typing import cast -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.cloud.datacatalog import TagField, TagTemplateField from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg @@ -40,7 +40,7 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from google.cloud.datacatalog import TagField, TagTemplateField +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/google/tests/system/google/marketing_platform/example_analytics_admin.py b/providers/google/tests/system/google/marketing_platform/example_analytics_admin.py index 52fae6c75c5b4..3536e7b945da2 100644 --- a/providers/google/tests/system/google/marketing_platform/example_analytics_admin.py +++ b/providers/google/tests/system/google/marketing_platform/example_analytics_admin.py @@ -41,6 +41,9 @@ import os from datetime import datetime +from google.analytics import admin_v1beta as google_analytics +from google.cloud.exceptions import NotFound + from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG @@ -56,8 +59,6 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from google.analytics import admin_v1beta as google_analytics -from google.cloud.exceptions import NotFound ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "google_analytics_admin" diff --git a/providers/google/tests/system/google/marketing_platform/example_campaign_manager.py 
b/providers/google/tests/system/google/marketing_platform/example_campaign_manager.py index 87247d01f3f47..07795ed310234 100644 --- a/providers/google/tests/system/google/marketing_platform/example_campaign_manager.py +++ b/providers/google/tests/system/google/marketing_platform/example_campaign_manager.py @@ -34,7 +34,7 @@ from datetime import datetime from typing import cast -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from google.api_core.exceptions import NotFound from airflow.decorators import task from airflow.models import Connection @@ -55,7 +55,7 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from google.api_core.exceptions import NotFound +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/grpc/tests/conftest.py b/providers/grpc/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/grpc/tests/conftest.py +++ b/providers/grpc/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/hashicorp/tests/conftest.py b/providers/hashicorp/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/hashicorp/tests/conftest.py +++ b/providers/hashicorp/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/http/tests/conftest.py b/providers/http/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/http/tests/conftest.py +++ b/providers/http/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/http/tests/system/__init__.py b/providers/http/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/http/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/imap/tests/conftest.py b/providers/imap/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/imap/tests/conftest.py +++ b/providers/imap/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/influxdb/tests/conftest.py b/providers/influxdb/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/influxdb/tests/conftest.py +++ b/providers/influxdb/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/influxdb/tests/system/__init__.py b/providers/influxdb/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/influxdb/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/jdbc/tests/conftest.py b/providers/jdbc/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/jdbc/tests/conftest.py +++ b/providers/jdbc/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/jdbc/tests/system/__init__.py b/providers/jdbc/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/jdbc/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/jenkins/tests/conftest.py b/providers/jenkins/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/jenkins/tests/conftest.py +++ b/providers/jenkins/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/jenkins/tests/system/__init__.py b/providers/jenkins/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/jenkins/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/azure/docs/index.rst b/providers/microsoft/azure/docs/index.rst index 7ede8d9f05158..ac044ee399748 100644 --- a/providers/microsoft/azure/docs/index.rst +++ b/providers/microsoft/azure/docs/index.rst @@ -61,7 +61,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/providers/microsoft/azure/tests/conftest.py b/providers/microsoft/azure/tests/conftest.py index 9034f7b9e04ea..e9be8dfdfa181 100644 --- a/providers/microsoft/azure/tests/conftest.py +++ b/providers/microsoft/azure/tests/conftest.py @@ -20,34 +20,14 @@ pytest_plugins = "tests_common.pytest_plugin" - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) - - -import json -import pathlib import random -import re import string -from inspect import currentframe -from json import JSONDecodeError -from os.path import dirname, join -from typing import Any, TypeVar -from unittest.mock import MagicMock - -from httpx import Headers, Response -from msgraph_core import APIVersion +from typing import TypeVar @pytest.fixture def create_mock_connection(monkeypatch): - """Helper fixture for create test connection.""" + """Create test connection.""" from airflow.models import Connection T = TypeVar("T", dict, str, Connection) @@ -76,7 +56,7 @@ def wrapper(conn: T, conn_id: str | None = None): @pytest.fixture def create_mock_connections(create_mock_connection): - """Helper fixture for create multiple test connections.""" + """Create multiple test connections.""" from airflow.models import Connection T = TypeVar("T", dict, str, Connection) @@ -89,109 +69,10 @@ def wrapper(*conns: T): @pytest.fixture def mocked_connection(request, create_mock_connection): - """Helper indirect fixture for create test connection.""" + """Create test connection.""" return create_mock_connection(request.param) -def mock_connection(schema: str | None = None, host: str | None = None): - from airflow.models import Connection - - connection = MagicMock(spec=Connection) - connection.schema = schema - connection.host = host - return connection - - -def mock_json_response(status_code, *contents) -> Response: - response = MagicMock(spec=Response) - response.status_code = status_code - response.headers = Headers({}) - response.content = b"" - if contents: - response.json.side_effect = list(contents) - else: - response.json.return_value = None - return response - - -def mock_response(status_code, content: Any = None, headers: dict | None = None) -> Response: - response = MagicMock(spec=Response) - response.status_code = status_code - response.headers = Headers(headers or {}) - response.content = content - response.json.side_effect = JSONDecodeError("", "", 0) - return response - - -def remove_license_header(content: str) -> str: - """ - Removes license header from the given content. 
- """ - # Define the pattern to match both block and single-line comments - pattern = r"(/\*.*?\*/)|(--.*?(\r?\n|\r))|(#.*?(\r?\n|\r))" - - # Check if there is a license header at the beginning of the file - if re.match(pattern, content, flags=re.DOTALL): - # Use re.DOTALL to allow .* to match newline characters in block comments - return re.sub(pattern, "", content, flags=re.DOTALL).strip() - return content.strip() - - -def load_json(*args: str): - directory = currentframe().f_back.f_globals["__name__"].split(".")[:-2] # type: ignore - with open(join(dirname(__file__), join(*directory), join(*args)), encoding="utf-8") as file: - return json.load(file) - - -def load_file(*args: str, mode="r", encoding="utf-8"): - directory = currentframe().f_back.f_globals["__name__"].split(".")[:-2] # type: ignore - with open(join(dirname(__file__), join(*directory), join(*args)), mode=mode, encoding=encoding) as file: - if mode == "r": - return remove_license_header(file.read()) - return file.read() - - -def get_airflow_connection( - conn_id: str, - host: str = "graph.microsoft.com", - login: str = "client_id", - password: str = "client_secret", - tenant_id: str = "tenant-id", - azure_tenant_id: str | None = None, - proxies: dict | None = None, - scopes: list[str] | None = None, - api_version: APIVersion | str | None = APIVersion.v1.value, - authority: str | None = None, - disable_instance_discovery: bool = False, -): - from airflow.models import Connection - - extra = { - "api_version": api_version, - "proxies": proxies or {}, - "verify": False, - "scopes": scopes or [], - "authority": authority, - "disable_instance_discovery": disable_instance_discovery, - } - - if azure_tenant_id: - extra["tenantId"] = azure_tenant_id - else: - extra["tenant_id"] = tenant_id - - return Connection( - schema="https", - conn_id=conn_id, - conn_type="http", - host=host, - port=80, - login=login, - password=password, - extra=extra, - ) - - @pytest.fixture(autouse=True) def clear_cache(): from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/base.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/base.py index c27e809892645..9ec505e01fa75 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/base.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/base.py @@ -20,9 +20,9 @@ from unittest.mock import patch from kiota_http.httpx_request_adapter import HttpxRequestAdapter -from providers.microsoft.azure.tests.conftest import get_airflow_connection from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook +from provider_tests.microsoft.azure.test_utils import get_airflow_connection class Base: diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/hooks/test_msgraph.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/hooks/test_msgraph.py index 50c3e0f2b610d..dd20e995adec4 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/hooks/test_msgraph.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/hooks/test_msgraph.py @@ -30,14 +30,6 @@ from kiota_serialization_text.text_parse_node import TextParseNode from msgraph_core import APIVersion, NationalClouds from opentelemetry.trace import Span -from providers.microsoft.azure.tests.conftest import ( - get_airflow_connection, - load_file, - load_json, - mock_connection, - mock_json_response, - mock_response, -) from 
airflow.exceptions import ( AirflowBadRequest, @@ -50,6 +42,14 @@ DefaultResponseHandler, KiotaRequestAdapterHook, ) +from provider_tests.microsoft.azure.test_utils import ( + get_airflow_connection, + load_file, + load_json, + mock_connection, + mock_json_response, + mock_response, +) from tests_common.test_utils.providers import get_provider_min_airflow_version diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_msgraph.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_msgraph.py index 2e25bccb67b54..640918efb6f1a 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_msgraph.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_msgraph.py @@ -22,17 +22,12 @@ from typing import TYPE_CHECKING, Any import pytest -from providers.microsoft.azure.tests.conftest import ( - load_file, - load_json, - mock_json_response, - mock_response, -) from airflow.exceptions import AirflowException from airflow.providers.microsoft.azure.operators.msgraph import MSGraphAsyncOperator from airflow.triggers.base import TriggerEvent from provider_tests.microsoft.azure.base import Base +from provider_tests.microsoft.azure.test_utils import load_file, load_json, mock_json_response, mock_response from tests_common.test_utils.mock_context import mock_context from tests_common.test_utils.operators.run_deferrable import execute_operator diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_powerbi.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_powerbi.py index 2c4ae5fccfd8a..f87833d4bcea2 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_powerbi.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_powerbi.py @@ -21,7 +21,6 @@ from unittest.mock import MagicMock import pytest -from providers.microsoft.azure.tests.conftest import get_airflow_connection from airflow.exceptions import AirflowException, TaskDeferred from airflow.providers.microsoft.azure.hooks.powerbi import ( @@ -32,6 +31,7 @@ from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger from airflow.utils import timezone from provider_tests.microsoft.azure.base import Base +from provider_tests.microsoft.azure.test_utils import get_airflow_connection from tests_common.test_utils.mock_context import mock_context diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/sensors/test_msgraph.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/sensors/test_msgraph.py index 93206889a796d..0f1d29af49a01 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/sensors/test_msgraph.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/sensors/test_msgraph.py @@ -20,11 +20,11 @@ from datetime import datetime import pytest -from providers.microsoft.azure.tests.conftest import load_json, mock_json_response from airflow.providers.microsoft.azure.sensors.msgraph import MSGraphSensor from airflow.triggers.base import TriggerEvent from provider_tests.microsoft.azure.base import Base +from provider_tests.microsoft.azure.test_utils import load_json, mock_json_response from tests_common.test_utils.operators.run_deferrable import execute_operator from tests_common.test_utils.version_compat import AIRFLOW_V_2_10_PLUS diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/test_utils.py 
b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/test_utils.py index 79abbf97f0423..c246444b44e31 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/test_utils.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/test_utils.py @@ -17,10 +17,17 @@ from __future__ import annotations +import json +import re +from json import JSONDecodeError +from os.path import dirname, join from typing import Any from unittest import mock +from unittest.mock import MagicMock import pytest +from httpx import Headers, Response +from msgraph_core import APIVersion from airflow.providers.microsoft.azure.utils import ( AzureIdentityCredentialAdapter, @@ -159,3 +166,98 @@ def test_init_with_identity(self, mock_default_azure_credential, mock_policy, mo ) def test_parse_blob_account_url(host, login, expected_url): assert parse_blob_account_url(host, login) == expected_url + + +def get_airflow_connection( + conn_id: str, + host: str = "graph.microsoft.com", + login: str = "client_id", + password: str = "client_secret", + tenant_id: str = "tenant-id", + azure_tenant_id: str | None = None, + proxies: dict | None = None, + scopes: list[str] | None = None, + api_version: APIVersion | str | None = APIVersion.v1.value, + authority: str | None = None, + disable_instance_discovery: bool = False, +): + from airflow.models import Connection + + extra = { + "api_version": api_version, + "proxies": proxies or {}, + "verify": False, + "scopes": scopes or [], + "authority": authority, + "disable_instance_discovery": disable_instance_discovery, + } + + if azure_tenant_id: + extra["tenantId"] = azure_tenant_id + else: + extra["tenant_id"] = tenant_id + + return Connection( + schema="https", + conn_id=conn_id, + conn_type="http", + host=host, + port=80, + login=login, + password=password, + extra=extra, + ) + + +def mock_connection(schema: str | None = None, host: str | None = None): + from airflow.models import Connection + + connection = MagicMock(spec=Connection) + connection.schema = schema + connection.host = host + return connection + + +def mock_json_response(status_code, *contents) -> Response: + response = MagicMock(spec=Response) + response.status_code = status_code + response.headers = Headers({}) + response.content = b"" + if contents: + response.json.side_effect = list(contents) + else: + response.json.return_value = None + return response + + +def mock_response(status_code, content: Any = None, headers: dict | None = None) -> Response: + response = MagicMock(spec=Response) + response.status_code = status_code + response.headers = Headers(headers or {}) + response.content = content + response.json.side_effect = JSONDecodeError("", "", 0) + return response + + +def remove_license_header(content: str) -> str: + """Remove license header from the given content.""" + # Define the pattern to match both block and single-line comments + pattern = r"(/\*.*?\*/)|(--.*?(\r?\n|\r))|(#.*?(\r?\n|\r))" + + # Check if there is a license header at the beginning of the file + if re.match(pattern, content, flags=re.DOTALL): + # Use re.DOTALL to allow .* to match newline characters in block comments + return re.sub(pattern, "", content, flags=re.DOTALL).strip() + return content.strip() + + +def load_json(*args: str): + with open(join(dirname(__file__), *args), encoding="utf-8") as file: + return json.load(file) + + +def load_file(*args: str, mode="r", encoding="utf-8"): + with open(join(dirname(__file__), *args), mode=mode, encoding=encoding) as file: + if mode == "r": + return 
remove_license_header(file.read()) + return file.read() diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_msgraph.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_msgraph.py index 6627db79dc403..c82e93e7ca2ee 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_msgraph.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_msgraph.py @@ -25,13 +25,6 @@ import pendulum from msgraph_core import APIVersion -from providers.microsoft.azure.tests.conftest import ( - get_airflow_connection, - load_file, - load_json, - mock_json_response, - mock_response, -) from airflow.exceptions import AirflowException from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook @@ -41,6 +34,13 @@ ) from airflow.triggers.base import TriggerEvent from provider_tests.microsoft.azure.base import Base +from provider_tests.microsoft.azure.test_utils import ( + get_airflow_connection, + load_file, + load_json, + mock_json_response, + mock_response, +) from tests_common.test_utils.operators.run_deferrable import run_trigger diff --git a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_powerbi.py b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_powerbi.py index 251ac34b4a618..ed14e0a98e5a2 100644 --- a/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_powerbi.py +++ b/providers/microsoft/azure/tests/provider_tests/microsoft/azure/triggers/test_powerbi.py @@ -21,7 +21,6 @@ from unittest import mock import pytest -from providers.microsoft.azure.tests.conftest import get_airflow_connection from airflow.providers.microsoft.azure.hooks.powerbi import ( PowerBIDatasetRefreshException, @@ -29,6 +28,7 @@ ) from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger from airflow.triggers.base import TriggerEvent +from provider_tests.microsoft.azure.test_utils import get_airflow_connection POWERBI_CONN_ID = "powerbi_default" DATASET_ID = "dataset_id" diff --git a/providers/microsoft/azure/tests/system/__init__.py b/providers/microsoft/azure/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/azure/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/azure/tests/system/microsoft/__init__.py b/providers/microsoft/azure/tests/system/microsoft/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/azure/tests/system/microsoft/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py b/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py index e0609ebb252a6..b16ae721a03b9 100644 --- a/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py +++ b/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py @@ -19,8 +19,6 @@ from datetime import datetime -from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder - from airflow import DAG from airflow.models.baseoperator import chain from airflow.providers.amazon.aws.operators.s3 import ( @@ -31,6 +29,7 @@ ) from airflow.providers.microsoft.azure.transfers.s3_to_wasb import S3ToAzureBlobStorageOperator from airflow.utils.trigger_rule import TriggerRule +from system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/microsoft/mssql/docs/index.rst b/providers/microsoft/mssql/docs/index.rst index b574f05bef995..d71a8e4e53dd5 100644 --- a/providers/microsoft/mssql/docs/index.rst +++ b/providers/microsoft/mssql/docs/index.rst @@ -57,7 +57,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/providers/microsoft/mssql/tests/conftest.py b/providers/microsoft/mssql/tests/conftest.py index 002424adb70d7..f56ccce0a3f69 100644 --- a/providers/microsoft/mssql/tests/conftest.py +++ b/providers/microsoft/mssql/tests/conftest.py @@ -16,43 +16,4 @@ # under the License. from __future__ import annotations -import pathlib -import re -from inspect import currentframe -from os.path import dirname, join, realpath - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) - - -def remove_license_header(content: str) -> str: - """ - Removes license header from the given content. 
- """ - # Define the pattern to match both block and single-line comments - pattern = r"(/\*.*?\*/)|(--.*?(\r?\n|\r))|(#.*?(\r?\n|\r))" - - # Check if there is a license header at the beginning of the file - if re.match(pattern, content, flags=re.DOTALL): - # Use re.DOTALL to allow .* to match newline characters in block comments - return re.sub(pattern, "", content, flags=re.DOTALL).strip() - return content.strip() - - -def load_file(*args: str, mode="r", encoding="utf-8"): - directory = currentframe().f_back.f_globals["__name__"].split(".")[:-1] # type: ignore - filename = join(dirname(realpath(__file__)), join(*directory), join(*args)) - with open(filename, mode=mode, encoding=encoding) as file: - if mode == "r": - return remove_license_header(file.read()) - return file.read() diff --git a/providers/microsoft/mssql/tests/integration/__init__.py b/providers/microsoft/mssql/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/mssql/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/mssql/tests/integration/microsoft/__init__.py b/providers/microsoft/mssql/tests/integration/microsoft/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/mssql/tests/integration/microsoft/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/apache/kafka/sensors/__init__.py b/providers/microsoft/mssql/tests/integration/microsoft/mssql/__init__.py similarity index 100% rename from providers/tests/integration/apache/kafka/sensors/__init__.py rename to providers/microsoft/mssql/tests/integration/microsoft/mssql/__init__.py diff --git a/providers/tests/integration/apache/kafka/triggers/__init__.py b/providers/microsoft/mssql/tests/integration/microsoft/mssql/hooks/__init__.py similarity index 100% rename from providers/tests/integration/apache/kafka/triggers/__init__.py rename to providers/microsoft/mssql/tests/integration/microsoft/mssql/hooks/__init__.py diff --git a/providers/tests/integration/microsoft/mssql/hooks/test_mssql.py b/providers/microsoft/mssql/tests/integration/microsoft/mssql/hooks/test_mssql.py similarity index 100% rename from providers/tests/integration/microsoft/mssql/hooks/test_mssql.py rename to providers/microsoft/mssql/tests/integration/microsoft/mssql/hooks/test_mssql.py diff --git a/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/hooks/test_mssql.py b/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/hooks/test_mssql.py index 83f2a14686f83..295e659fb1bfe 100644 --- a/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/hooks/test_mssql.py +++ b/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/hooks/test_mssql.py @@ -21,11 +21,11 @@ import pytest import sqlalchemy -from providers.microsoft.mssql.tests.conftest import load_file from airflow.configuration import conf from airflow.models import Connection from airflow.providers.microsoft.mssql.dialects.mssql import MsSqlDialect +from provider_tests.microsoft.mssql.test_utils import load_file try: from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook @@ -286,7 +286,7 @@ def test_generate_insert_sql(self, get_connection): ], replace=True, ) - assert sql == load_file("..", "resources", "replace.sql") + assert sql == load_file("resources", "replace.sql") def test_dialect_name(self): hook = MsSqlHook() diff --git a/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/test_utils.py b/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/test_utils.py new file mode 100644 index 0000000000000..6a19ed1fae306 --- /dev/null +++ b/providers/microsoft/mssql/tests/provider_tests/microsoft/mssql/test_utils.py @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import re +from os.path import dirname, join + + +def remove_license_header(content: str) -> str: + """Remove license header from the given content.""" + # Define the pattern to match both block and single-line comments + pattern = r"(/\*.*?\*/)|(--.*?(\r?\n|\r))|(#.*?(\r?\n|\r))" + + # Check if there is a license header at the beginning of the file + if re.match(pattern, content, flags=re.DOTALL): + # Use re.DOTALL to allow .* to match newline characters in block comments + return re.sub(pattern, "", content, flags=re.DOTALL).strip() + return content.strip() + + +def load_file(*args: str, mode="r", encoding="utf-8"): + with open(join(dirname(__file__), *args), mode=mode, encoding=encoding) as file: + if mode == "r": + return remove_license_header(file.read()) + return file.read() diff --git a/providers/microsoft/mssql/tests/system/__init__.py b/providers/microsoft/mssql/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/mssql/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/mssql/tests/system/microsoft/__init__.py b/providers/microsoft/mssql/tests/system/microsoft/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/mssql/tests/system/microsoft/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/psrp/tests/conftest.py b/providers/microsoft/psrp/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/microsoft/psrp/tests/conftest.py +++ b/providers/microsoft/psrp/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/microsoft/winrm/docs/index.rst b/providers/microsoft/winrm/docs/index.rst index 6dde76f4ee390..3e4aa07fbd761 100644 --- a/providers/microsoft/winrm/docs/index.rst +++ b/providers/microsoft/winrm/docs/index.rst @@ -55,7 +55,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/providers/microsoft/winrm/tests/conftest.py b/providers/microsoft/winrm/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/microsoft/winrm/tests/conftest.py +++ b/providers/microsoft/winrm/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/microsoft/winrm/tests/system/__init__.py b/providers/microsoft/winrm/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/winrm/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/microsoft/winrm/tests/system/microsoft/__init__.py b/providers/microsoft/winrm/tests/system/microsoft/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/microsoft/winrm/tests/system/microsoft/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/mongo/tests/conftest.py b/providers/mongo/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/mongo/tests/conftest.py +++ b/providers/mongo/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/mongo/tests/integration/__init__.py b/providers/mongo/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/mongo/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/apache/pinot/__init__.py b/providers/mongo/tests/integration/mongo/__init__.py similarity index 100% rename from providers/tests/integration/apache/pinot/__init__.py rename to providers/mongo/tests/integration/mongo/__init__.py diff --git a/providers/tests/integration/apache/pinot/hooks/__init__.py b/providers/mongo/tests/integration/mongo/sensors/__init__.py similarity index 100% rename from providers/tests/integration/apache/pinot/hooks/__init__.py rename to providers/mongo/tests/integration/mongo/sensors/__init__.py diff --git a/providers/tests/integration/mongo/sensors/test_mongo.py b/providers/mongo/tests/integration/mongo/sensors/test_mongo.py similarity index 100% rename from providers/tests/integration/mongo/sensors/test_mongo.py rename to providers/mongo/tests/integration/mongo/sensors/test_mongo.py diff --git a/providers/mysql/tests/conftest.py b/providers/mysql/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/mysql/tests/conftest.py +++ b/providers/mysql/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
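Each trimmed conftest.py keeps only `pytest_plugins = "tests_common.pytest_plugin"`, so the deleted `pytest_configure` hook presumably now lives once in that shared plugin instead of being copied into every provider. A deliberately simplified sketch of the hoisted hook's shape (the discovery of each provider's `deprecations_ignore.yml` is an assumption here, not code from this diff):

```python
# Simplified sketch of the hook after hoisting into tests_common.pytest_plugin;
# how the real plugin locates each provider's deprecations_ignore.yml may differ.
from __future__ import annotations

import pathlib

import pytest


@pytest.hookimpl(tryfirst=True)
def pytest_configure(config: pytest.Config) -> None:
    # One registration point instead of 90+ per-provider copies of this hook.
    deprecations_ignore = pathlib.Path(config.rootpath) / "deprecations_ignore.yml"
    dep_path = [deprecations_ignore] if deprecations_ignore.exists() else []
    config.inicfg["airflow_deprecations_ignore"] = (
        config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
    )
```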
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/mysql/tests/system/__init__.py b/providers/mysql/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/mysql/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/neo4j/tests/conftest.py b/providers/neo4j/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/neo4j/tests/conftest.py +++ b/providers/neo4j/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/neo4j/tests/system/__init__.py b/providers/neo4j/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/neo4j/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/odbc/tests/conftest.py b/providers/odbc/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/odbc/tests/conftest.py +++ b/providers/odbc/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/odbc/tests/system/__init__.py b/providers/odbc/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/odbc/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/openai/tests/conftest.py b/providers/openai/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/openai/tests/conftest.py +++ b/providers/openai/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/openai/tests/system/__init__.py b/providers/openai/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/openai/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/openfaas/tests/conftest.py b/providers/openfaas/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/openfaas/tests/conftest.py +++ b/providers/openfaas/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/openlineage/docs/guides/developer.rst b/providers/openlineage/docs/guides/developer.rst index be2c0ac447d8e..b0722e1a248a5 100644 --- a/providers/openlineage/docs/guides/developer.rst +++ b/providers/openlineage/docs/guides/developer.rst @@ -287,7 +287,7 @@ To learn more about how Operators and Extractors work together under the hood, c When testing an Extractor, we want to firstly verify if ``OperatorLineage`` object is being created, specifically verifying that the object is being built with the correct input and output datasets and relevant facets. This is done in OpenLineage via pytest, with appropriate mocking and patching for connections and objects. -Check out `example tests `_. +Check out `example tests `_. Testing each facet is also important, as data or graphs in the UI can render incorrectly if the facets are wrong. For example, if the facet name is created incorrectly in the Extractor, then the Operator's task will not show up in the lineage graph, @@ -357,8 +357,8 @@ like extracting column level lineage and inputs/outputs from SQL query with SQL return lineage_metadata For more examples of OpenLineage Extractors, check out the source code of -`BashExtractor `_ or -`PythonExtractor `_. +`BashExtractor `_ or +`PythonExtractor `_. .. _inlets_outlets:openlineage: diff --git a/providers/openlineage/tests/conftest.py b/providers/openlineage/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/openlineage/tests/conftest.py +++ b/providers/openlineage/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
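Every `tests/system/__init__.py` and `tests/integration/__init__.py` added in this patch carries the same `pkgutil.extend_path` line. It turns `system` and `integration` into old-style namespace packages, which is what makes cross-provider imports such as `from system.google import ...` resolve even though each provider distribution ships its own `system/` tree. A self-contained demonstration of the mechanism (the provider names `alpha` and `beta` are invented for the demo):

```python
# Self-contained demo of the pkgutil.extend_path mechanism used by the new
# __init__.py files in this patch.
import os
import sys
import tempfile

root = tempfile.mkdtemp()
INIT = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)\n'

# Two "provider test roots" that both ship a top-level "system" package.
for provider, module in (("alpha", "a.py"), ("beta", "b.py")):
    pkg = os.path.join(root, provider, "system")
    os.makedirs(pkg)
    with open(os.path.join(pkg, "__init__.py"), "w") as f:
        f.write(INIT)
    open(os.path.join(pkg, module), "w").close()

sys.path[:0] = [os.path.join(root, "alpha"), os.path.join(root, "beta")]

import system      # noqa: E402 -- alpha's copy wins, then extend_path merges beta's
import system.a    # noqa: E402 -- lives under alpha/system/
import system.b    # noqa: E402 -- lives under beta/system/, same logical package

print(system.__path__)  # lists both system/ directories
```

The same trick is why the per-provider `system.PROVIDER` and `integration.PROVIDER` import paths used throughout these hunks work without a single shared umbrella package.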
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/openlineage/tests/integration/__init__.py b/providers/openlineage/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/openlineage/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/google/__init__.py b/providers/openlineage/tests/integration/openlineage/__init__.py similarity index 100% rename from providers/tests/integration/google/__init__.py rename to providers/openlineage/tests/integration/openlineage/__init__.py diff --git a/providers/tests/integration/google/cloud/__init__.py b/providers/openlineage/tests/integration/openlineage/operators/__init__.py similarity index 100% rename from providers/tests/integration/google/cloud/__init__.py rename to providers/openlineage/tests/integration/openlineage/operators/__init__.py diff --git a/providers/openlineage/tests/system/__init__.py b/providers/openlineage/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/openlineage/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/openlineage/tests/system/openlineage/conftest.py b/providers/openlineage/tests/system/openlineage/conftest.py index 48d568b307ef8..260fc25eb48ea 100644 --- a/providers/openlineage/tests/system/openlineage/conftest.py +++ b/providers/openlineage/tests/system/openlineage/conftest.py @@ -20,8 +20,7 @@ from airflow.listeners.listener import get_listener_manager from airflow.providers.openlineage.plugins.listener import OpenLineageListener - -from providers.tests.system.openlineage.transport.variable import VariableTransport +from system.openlineage.transport.variable import VariableTransport @pytest.fixture(autouse=True) diff --git a/providers/openlineage/tests/system/openlineage/example_openlineage.py b/providers/openlineage/tests/system/openlineage/example_openlineage.py index 28b92540ef4b3..eec8070d5711e 100644 --- a/providers/openlineage/tests/system/openlineage/example_openlineage.py +++ b/providers/openlineage/tests/system/openlineage/example_openlineage.py @@ -19,10 +19,9 @@ from datetime import datetime from pathlib import Path -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow import DAG from airflow.providers.standard.operators.python import PythonOperator +from system.openlineage.operator import OpenLineageTestOperator def do_nothing(): diff --git a/providers/openlineage/tests/system/openlineage/example_openlineage_mapped_sensor.py b/providers/openlineage/tests/system/openlineage/example_openlineage_mapped_sensor.py index f49b6e591b325..e7e7d1e7ec6a4 100644 --- a/providers/openlineage/tests/system/openlineage/example_openlineage_mapped_sensor.py +++ b/providers/openlineage/tests/system/openlineage/example_openlineage_mapped_sensor.py @@ -20,12 +20,11 @@ from datetime import datetime, timedelta from pathlib import Path -from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator - from airflow import DAG from airflow.models import Variable from airflow.providers.standard.operators.python import PythonOperator from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor +from system.openlineage.operator import OpenLineageTestOperator def my_task(task_number): diff --git a/providers/openlineage/tests/system/openlineage/transport/variable.py b/providers/openlineage/tests/system/openlineage/transport/variable.py index a4c508dea35f9..beeeac5aff6d0 100644 --- a/providers/openlineage/tests/system/openlineage/transport/variable.py +++ b/providers/openlineage/tests/system/openlineage/transport/variable.py @@ -18,10 +18,11 @@ from typing import TYPE_CHECKING -from airflow.models.variable import Variable from openlineage.client.serde import Serde from openlineage.client.transport import Transport, get_default_factory +from airflow.models.variable import Variable + if TYPE_CHECKING: from openlineage.client.client import Event diff --git a/providers/opensearch/tests/conftest.py b/providers/opensearch/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/opensearch/tests/conftest.py +++ b/providers/opensearch/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
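The conftest hunk above switches the OpenLineage system tests to `from system.openlineage.transport.variable import VariableTransport`. The transport itself is only touched here for import ordering, but its role is worth spelling out: it is an OpenLineage client transport that parks emitted events in an Airflow Variable so a system test can assert on them afterwards. A rough sketch of that shape (the imports mirror the `variable.py` hunk; the `emit()` body is a hedged reconstruction, not the repository implementation):

```python
# Approximate shape of VariableTransport; details may differ from the real file.
from __future__ import annotations

import json

from openlineage.client.serde import Serde
from openlineage.client.transport import Transport

from airflow.models.variable import Variable


class VariableTransport(Transport):
    """Store each emitted OpenLineage event in an Airflow Variable keyed by job name."""

    kind = "variable"

    def emit(self, event) -> None:
        key = f"{event.job.namespace}.{event.job.name}"
        stored = Variable.get(key, default_var=None)
        events = json.loads(stored) if stored else []
        events.append(json.loads(Serde.to_json(event)))  # normalize via the client serde
        Variable.set(key, json.dumps(events))
```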
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/opensearch/tests/system/__init__.py b/providers/opensearch/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/opensearch/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/opsgenie/tests/conftest.py b/providers/opsgenie/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/opsgenie/tests/conftest.py +++ b/providers/opsgenie/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/opsgenie/tests/system/__init__.py b/providers/opsgenie/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/opsgenie/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/oracle/pyproject.toml b/providers/oracle/pyproject.toml index aebe73160f30e..8d01bf2371ab9 100644 --- a/providers/oracle/pyproject.toml +++ b/providers/oracle/pyproject.toml @@ -64,7 +64,7 @@ dependencies = [ # Any change in the dependencies is preserved when the file is regenerated [project.optional-dependencies] "numpy" = [ - "numpy", + "numpy>=1.26.0", ] [project.urls] diff --git a/providers/oracle/src/airflow/providers/oracle/get_provider_info.py b/providers/oracle/src/airflow/providers/oracle/get_provider_info.py index 986569809157f..2de14b29fd3b6 100644 --- a/providers/oracle/src/airflow/providers/oracle/get_provider_info.py +++ b/providers/oracle/src/airflow/providers/oracle/get_provider_info.py @@ -97,5 +97,5 @@ def get_provider_info(): "apache-airflow-providers-common-sql>=1.20.0", "oracledb>=2.0.0", ], - "optional-dependencies": {"numpy": ["numpy"]}, + "optional-dependencies": {"numpy": ["numpy>=1.26.0"]}, } diff --git a/providers/oracle/tests/conftest.py b/providers/oracle/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/oracle/tests/conftest.py +++ b/providers/oracle/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/oracle/tests/system/__init__.py b/providers/oracle/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/oracle/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/pagerduty/tests/conftest.py b/providers/pagerduty/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/pagerduty/tests/conftest.py +++ b/providers/pagerduty/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/papermill/tests/conftest.py b/providers/papermill/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/papermill/tests/conftest.py +++ b/providers/papermill/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/papermill/tests/system/__init__.py b/providers/papermill/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/papermill/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/pgvector/tests/conftest.py b/providers/pgvector/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/pgvector/tests/conftest.py +++ b/providers/pgvector/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/pgvector/tests/system/__init__.py b/providers/pgvector/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/pgvector/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/pinecone/tests/conftest.py b/providers/pinecone/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/pinecone/tests/conftest.py +++ b/providers/pinecone/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/pinecone/tests/system/__init__.py b/providers/pinecone/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/pinecone/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/postgres/tests/conftest.py b/providers/postgres/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/postgres/tests/conftest.py +++ b/providers/postgres/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/postgres/tests/system/__init__.py b/providers/postgres/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/postgres/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/presto/tests/conftest.py b/providers/presto/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/presto/tests/conftest.py +++ b/providers/presto/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/presto/tests/system/__init__.py b/providers/presto/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/presto/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/pyproject.toml b/providers/pyproject.toml deleted file mode 100644 index 29cbbbb016053..0000000000000 --- a/providers/pyproject.toml +++ /dev/null @@ -1,93 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -[build-system] -requires = ["hatchling", "hatch-vcs"] -build-backend = "hatchling.build" - -[project] -name = "local-providers" -version = "0.1.0" -description = "Placeholder package for local/from-sources providers." -requires-python = ">=3.9, <3.13" -classifiers = [ - "Private :: Do Not Upload", -] - -[tool.hatch.publish.index] -# Lets make doubly sure this never goes to PyPi -disable = true - -[tool.hatch.build.targets.wheel] -packages = ["src/airflow"] -exclude = [ - ".gitignore", - ".latest-doc-only-change.txt", - "CHANGELOG.rst", - "MANAGING_PROVIDERS_LIFECYCLE.rst", - "src/airflow/__init__.py" # This file only exists to make pyright/VSCode happy, don't ship it -] - -[tool.ruff] -extend = "../pyproject.toml" -src = ["src"] -namespace-packages = ["src/airflow/providers"] -extend-exclude = [ - # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can - # ignore them in ruff - "src/airflow/providers/common/sql/*/*.pyi", -] - -[tool.ruff.lint.per-file-ignores] - -# Ignore Doc rules et al for anything outside of tests -"!src/*" = ["D", "TID253", "S101", "TRY002"] - -# All of the modules which have an extra license header (i.e. 
that we copy from another project) need to -# ignore E402 -- module level import not at top level -"tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py" = ["E402"] -"tests/common/io/xcom/test_backend.py" = ["E402"] -"tests/elasticsearch/log/elasticmock/__init__.py" = ["E402"] -"tests/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_auto_ml.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_custom_job.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_dataset.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_generative_model.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_model_service.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py" = ["E402"] -"tests/google/cloud/hooks/vertex_ai/test_prediction_service.py" = ["E402"] -"tests/google/cloud/links/test_translate.py" = ["E402"] -"tests/google/cloud/operators/test_automl.py"= ["E402"] -"tests/google/cloud/operators/test_vertex_ai.py" = ["E402"] -"tests/google/cloud/operators/vertex_ai/test_generative_model.py" = ["E402"] -"tests/google/cloud/triggers/test_vertex_ai.py" = ["E402"] -"tests/openai/hooks/test_openai.py" = ["E402"] -"tests/openai/operators/test_openai.py" = ["E402"] -"tests/openai/triggers/test_openai.py" = ["E402"] -"tests/opensearch/conftest.py" = ["E402"] -"tests/opensearch/hooks/test_opensearch.py" = ["E402"] -"tests/opensearch/log/test_os_json_formatter.py" = ["E402"] -"tests/opensearch/log/test_os_response.py" = ["E402"] -"tests/opensearch/log/test_os_task_handler.py" = ["E402"] -"tests/opensearch/operators/test_opensearch.py" = ["E402"] -"tests/qdrant/hooks/test_qdrant.py" = ["E402"] -"tests/qdrant/operators/test_qdrant.py" = ["E402"] -"tests/snowflake/operators/test_snowflake_sql.py" = ["E402"] -"tests/yandex/**/*.py" = ["E402"] diff --git a/providers/qdrant/tests/conftest.py b/providers/qdrant/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/qdrant/tests/conftest.py +++ b/providers/qdrant/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/qdrant/tests/integration/__init__.py b/providers/qdrant/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/qdrant/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/google/cloud/transfers/__init__.py b/providers/qdrant/tests/integration/qdrant/__init__.py similarity index 100% rename from providers/tests/integration/google/cloud/transfers/__init__.py rename to providers/qdrant/tests/integration/qdrant/__init__.py diff --git a/providers/tests/integration/microsoft/__init__.py b/providers/qdrant/tests/integration/qdrant/hooks/__init__.py similarity index 100% rename from providers/tests/integration/microsoft/__init__.py rename to providers/qdrant/tests/integration/qdrant/hooks/__init__.py diff --git a/providers/tests/integration/qdrant/hooks/test_qdrant.py b/providers/qdrant/tests/integration/qdrant/hooks/test_qdrant.py similarity index 100% rename from providers/tests/integration/qdrant/hooks/test_qdrant.py rename to providers/qdrant/tests/integration/qdrant/hooks/test_qdrant.py diff --git a/providers/tests/integration/microsoft/mssql/__init__.py b/providers/qdrant/tests/integration/qdrant/operators/__init__.py similarity index 100% rename from providers/tests/integration/microsoft/mssql/__init__.py rename to providers/qdrant/tests/integration/qdrant/operators/__init__.py diff --git a/providers/tests/integration/qdrant/operators/test_qdrant_ingest.py b/providers/qdrant/tests/integration/qdrant/operators/test_qdrant_ingest.py similarity index 100% rename from providers/tests/integration/qdrant/operators/test_qdrant_ingest.py rename to providers/qdrant/tests/integration/qdrant/operators/test_qdrant_ingest.py diff --git a/providers/qdrant/tests/system/__init__.py b/providers/qdrant/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/qdrant/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/redis/tests/conftest.py b/providers/redis/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/redis/tests/conftest.py +++ b/providers/redis/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
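The redis hunks that follow are pure renames (similarity index 100%): the integration tests keep their content and only gain their new per-provider home plus the pkgutil-style `__init__.py` files. For orientation, the general shape such an integration test has (illustrative only, not copied from the moved files):

```python
# Illustrative shape of a moved integration test; the renamed test_redis.py
# files are unchanged by this PR, so this is only an orientation sketch.
import pytest

from airflow.providers.redis.hooks.redis import RedisHook


@pytest.mark.integration("redis")
def test_redis_ping():
    # Requires a live Redis instance, e.g. one started by Breeze's integration setup.
    hook = RedisHook(redis_conn_id="redis_default")
    assert hook.get_conn().ping()
```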
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/redis/tests/integration/__init__.py b/providers/redis/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/redis/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/microsoft/mssql/hooks/__init__.py b/providers/redis/tests/integration/redis/__init__.py similarity index 100% rename from providers/tests/integration/microsoft/mssql/hooks/__init__.py rename to providers/redis/tests/integration/redis/__init__.py diff --git a/providers/tests/integration/mongo/__init__.py b/providers/redis/tests/integration/redis/hooks/__init__.py similarity index 100% rename from providers/tests/integration/mongo/__init__.py rename to providers/redis/tests/integration/redis/hooks/__init__.py diff --git a/providers/tests/integration/redis/hooks/test_redis.py b/providers/redis/tests/integration/redis/hooks/test_redis.py similarity index 100% rename from providers/tests/integration/redis/hooks/test_redis.py rename to providers/redis/tests/integration/redis/hooks/test_redis.py diff --git a/providers/tests/integration/mongo/sensors/__init__.py b/providers/redis/tests/integration/redis/operators/__init__.py similarity index 100% rename from providers/tests/integration/mongo/sensors/__init__.py rename to providers/redis/tests/integration/redis/operators/__init__.py diff --git a/providers/tests/integration/redis/operators/test_redis_publish.py b/providers/redis/tests/integration/redis/operators/test_redis_publish.py similarity index 100% rename from providers/tests/integration/redis/operators/test_redis_publish.py rename to providers/redis/tests/integration/redis/operators/test_redis_publish.py diff --git a/providers/tests/integration/openlineage/__init__.py b/providers/redis/tests/integration/redis/sensors/__init__.py similarity index 100% rename from providers/tests/integration/openlineage/__init__.py rename to providers/redis/tests/integration/redis/sensors/__init__.py diff --git a/providers/tests/integration/redis/sensors/test_redis_key.py 
b/providers/redis/tests/integration/redis/sensors/test_redis_key.py similarity index 100% rename from providers/tests/integration/redis/sensors/test_redis_key.py rename to providers/redis/tests/integration/redis/sensors/test_redis_key.py diff --git a/providers/tests/integration/redis/sensors/test_redis_pub_sub.py b/providers/redis/tests/integration/redis/sensors/test_redis_pub_sub.py similarity index 100% rename from providers/tests/integration/redis/sensors/test_redis_pub_sub.py rename to providers/redis/tests/integration/redis/sensors/test_redis_pub_sub.py diff --git a/providers/redis/tests/system/__init__.py b/providers/redis/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/redis/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/salesforce/tests/conftest.py b/providers/salesforce/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/salesforce/tests/conftest.py +++ b/providers/salesforce/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/salesforce/tests/system/__init__.py b/providers/salesforce/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/salesforce/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/samba/tests/conftest.py b/providers/samba/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/samba/tests/conftest.py +++ b/providers/samba/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/samba/tests/system/__init__.py b/providers/samba/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/samba/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/samba/tests/system/samba/example_gcs_to_samba.py b/providers/samba/tests/system/samba/example_gcs_to_samba.py index f2e93ebffb5d7..0504f4fd0548f 100644 --- a/providers/samba/tests/system/samba/example_gcs_to_samba.py +++ b/providers/samba/tests/system/samba/example_gcs_to_samba.py @@ -25,13 +25,12 @@ from datetime import datetime from pathlib import Path -from providers.google.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - from airflow import models from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.providers.samba.transfers.gcs_to_samba import GCSToSambaOperator from airflow.utils.trigger_rule import TriggerRule +from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/providers/segment/tests/conftest.py b/providers/segment/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/segment/tests/conftest.py +++ b/providers/segment/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/sendgrid/tests/conftest.py b/providers/sendgrid/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/sendgrid/tests/conftest.py +++ b/providers/sendgrid/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/sftp/tests/conftest.py b/providers/sftp/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/sftp/tests/conftest.py +++ b/providers/sftp/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/sftp/tests/system/__init__.py b/providers/sftp/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/sftp/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/singularity/tests/conftest.py b/providers/singularity/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/singularity/tests/conftest.py +++ b/providers/singularity/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/singularity/tests/system/__init__.py b/providers/singularity/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/singularity/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/slack/tests/conftest.py b/providers/slack/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/slack/tests/conftest.py +++ b/providers/slack/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/slack/tests/system/__init__.py b/providers/slack/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/slack/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/smtp/tests/conftest.py b/providers/smtp/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/smtp/tests/conftest.py +++ b/providers/smtp/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/snowflake/src/airflow/providers/snowflake/operators/snowflake.py b/providers/snowflake/src/airflow/providers/snowflake/operators/snowflake.py index d2b7479dff38b..2e0686138cb58 100644 --- a/providers/snowflake/src/airflow/providers/snowflake/operators/snowflake.py +++ b/providers/snowflake/src/airflow/providers/snowflake/operators/snowflake.py @@ -43,7 +43,7 @@ class SnowflakeCheckOperator(SQLCheckOperator): """ - Performs a check against Snowflake. + Perform a check against Snowflake. The ``SnowflakeCheckOperator`` expects a sql query that will return a single row. Each value on that first row is evaluated using python ``bool`` casting. If any of the values @@ -294,8 +294,9 @@ def __init__( class SnowflakeSqlApiOperator(SQLExecuteQueryOperator): """ - Implemented Snowflake SQL API Operator to support multiple SQL statements sequentially, - which is the behavior of the SQLExecuteQueryOperator, the Snowflake SQL API allows submitting + Snowflake SQL API operator that supports executing multiple SQL statements sequentially. + + Running statements sequentially matches the behavior of the ``SQLExecuteQueryOperator``; in addition, the Snowflake SQL API allows submitting multiple SQL statements in a single request. It make post request to submit SQL statements for execution, poll to check the status of the execution of a statement. Fetch query results concurrently. diff --git a/providers/snowflake/tests/conftest.py b/providers/snowflake/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/snowflake/tests/conftest.py +++ b/providers/snowflake/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/snowflake/tests/system/__init__.py b/providers/snowflake/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/snowflake/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership.
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/sqlite/tests/conftest.py b/providers/sqlite/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/sqlite/tests/conftest.py +++ b/providers/sqlite/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/sqlite/tests/system/__init__.py b/providers/sqlite/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/sqlite/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/src/README.md b/providers/src/README.md deleted file mode 100644 index da4590ac483f1..0000000000000 --- a/providers/src/README.md +++ /dev/null @@ -1,44 +0,0 @@ - - -This is where old provider structure is stored. All providers will soon be moved to the new structure. - -The new structure looks as follows: - -```text -providers - |- PROVIDER_ID - | |- src - | | |-airflow - | | |- providers - | | |- PROVIDER_ID - | |- tests - | | |- providers - | | |- PROVIDER_ID - | |- docs - | | |- .latest-doc-only-changes.txt - | |- pyproject.toml - | |- CHANGELOG.rst - | |- provider.yaml - | |- README.rst - |- PROVIDER_ID2 - ... -``` - -TODO: describe and semi-automate the migration process. 
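Note on the mechanism (illustrative only, not a file in this patch): the (nearly) empty `__init__.py` files added under the providers' `tests` directories in this change all carry the same `pkgutil` one-liner. A minimal sketch of what that idiom does:

```python
# Sketch of the one-liner used in the new tests/system/__init__.py and
# tests/integration/__init__.py files (not itself part of the patch).
# pkgutil.extend_path() scans sys.path for other directories that also contain
# a package with this name and appends them to __path__, so "system" (and
# nested packages such as "system.apache") behave as one package shared by all
# provider distributions. That is what lets an import like
# "from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID" (used in the
# samba example above) resolve from another provider's tests directory.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
```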
diff --git a/providers/src/airflow/__init__.py b/providers/src/airflow/__init__.py deleted file mode 100644 index 716fea5d47253..0000000000000 --- a/providers/src/airflow/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Pycharm needs to see this line. VSCode/pyright doesn't care about it, but this file needs to exist -# https://github.com/microsoft/pyright/issues/9439#issuecomment-2468990559 -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/src/airflow/providers/.gitignore b/providers/src/airflow/providers/.gitignore deleted file mode 100644 index 528066d9003e2..0000000000000 --- a/providers/src/airflow/providers/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/__init__.py diff --git a/providers/src/airflow/providers/__init__.py b/providers/src/airflow/providers/__init__.py deleted file mode 100644 index 716fea5d47253..0000000000000 --- a/providers/src/airflow/providers/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Pycharm needs to see this line. VSCode/pyright doesn't care about it, but this file needs to exist -# https://github.com/microsoft/pyright/issues/9439#issuecomment-2468990559 -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/ssh/tests/conftest.py b/providers/ssh/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/ssh/tests/conftest.py +++ b/providers/ssh/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/standard/tests/conftest.py b/providers/standard/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/standard/tests/conftest.py +++ b/providers/standard/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/tableau/tests/conftest.py b/providers/tableau/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/tableau/tests/conftest.py +++ b/providers/tableau/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/tableau/tests/system/__init__.py b/providers/tableau/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/tableau/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/telegram/tests/conftest.py b/providers/telegram/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/telegram/tests/conftest.py +++ b/providers/telegram/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/telegram/tests/system/__init__.py b/providers/telegram/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/telegram/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/teradata/tests/conftest.py b/providers/teradata/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/teradata/tests/conftest.py +++ b/providers/teradata/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/teradata/tests/system/__init__.py b/providers/teradata/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/teradata/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/conftest.py b/providers/tests/conftest.py deleted file mode 100644 index 85ffd8f1d9078..0000000000000 --- a/providers/tests/conftest.py +++ /dev/null @@ -1,68 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -import pathlib -from unittest import mock - -import pytest - -pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - dep_path = [pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) - - -@pytest.fixture -def hook_conn(request): - """ - Patch ``BaseHook.get_connection()`` by mock value. - - This fixture optionally parametrized, if ``param`` not set or empty it just mock method. - If param is dictionary or :class:`~airflow.models.Connection` than return it, - If param is exception than add side effect. - Otherwise, it raises an error - """ - from airflow.models import Connection - - try: - conn = request.param - except AttributeError: - conn = None - - with mock.patch("airflow.hooks.base.BaseHook.get_connection") as m: - if not conn: - pass # Don't do anything if param not specified or empty - elif isinstance(conn, dict): - m.return_value = Connection(**conn) - elif not isinstance(conn, Connection): - m.return_value = conn - elif isinstance(conn, Exception): - m.side_effect = conn - else: - raise TypeError( - f"{request.node.name!r}: expected dict, Connection object or Exception, " - f"but got {type(conn).__name__}" - ) - - yield m diff --git a/providers/tests/integration/redis/sensors/__init__.py b/providers/tests/integration/redis/sensors/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/integration/redis/sensors/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/integration/trino/__init__.py b/providers/tests/integration/trino/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/integration/trino/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/integration/trino/hooks/__init__.py b/providers/tests/integration/trino/hooks/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/integration/trino/hooks/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/integration/ydb/__init__.py b/providers/tests/integration/ydb/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/integration/ydb/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
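For context on the `conftest.py` trimming repeated across the providers above: each provider now keeps only the `pytest_plugins = "tests_common.pytest_plugin"` declaration, and the duplicated deprecation-warning bookkeeping is handled once, centrally. Below is a hedged sketch of such a centralized hook, modeled directly on the removed per-provider snippet; how the shared plugin actually discovers each provider's `deprecations_ignore.yml` is an assumption here, and the real code in `tests_common.pytest_plugin` may differ:

```python
# Sketch only: modeled on the removed per-provider pytest_configure snippet.
from __future__ import annotations

import pathlib

import pytest


@pytest.hookimpl(tryfirst=True)
def pytest_configure(config: pytest.Config) -> None:
    # Assumed discovery: look for deprecations_ignore.yml under the rootdir of
    # the test run, so each provider distribution keeps its own ignore list.
    deprecations_ignore_path = pathlib.Path(config.rootpath).joinpath("deprecations_ignore.yml")
    dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else []
    config.inicfg["airflow_deprecations_ignore"] = (
        config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
    )
```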
diff --git a/providers/tests/integration/ydb/hooks/__init__.py b/providers/tests/integration/ydb/hooks/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/integration/ydb/hooks/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/integration/ydb/operators/__init__.py b/providers/tests/integration/ydb/operators/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/integration/ydb/operators/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/system/__init__.py b/providers/tests/system/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/system/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/system/cncf/__init__.py b/providers/tests/system/cncf/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/system/cncf/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/system/conftest.py b/providers/tests/system/conftest.py deleted file mode 100644 index 0897c2af92410..0000000000000 --- a/providers/tests/system/conftest.py +++ /dev/null @@ -1,50 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import itertools -import os -import re - -import pytest - -REQUIRED_ENV_VARS = ("SYSTEM_TESTS_ENV_ID",) - - -@pytest.fixture -def provider_env_vars(): - """Override this fixture in provider's conftest.py""" - return () - - -@pytest.fixture(autouse=True) -def skip_if_env_var_not_set(provider_env_vars: list[str]) -> None: - for env in itertools.chain(REQUIRED_ENV_VARS, provider_env_vars): - if env not in os.environ: - pytest.skip(f"Missing required environment variable {env}") - return - - -def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None: - """Add @pytest.mark.system(provider_name) for every system test.""" - rootdir = config.rootpath - for item in items: - rel_path = item.path.relative_to(rootdir) - # Provider system tests - match = re.match(".*tests/system/([^/]+)", str(rel_path)) - if match: - item.add_marker(pytest.mark.system) diff --git a/providers/tests/system/dbt/__init__.py b/providers/tests/system/dbt/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/system/dbt/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/tests/system/example_empty.py b/providers/tests/system/example_empty.py deleted file mode 100644 index ca81ea08d0be5..0000000000000 --- a/providers/tests/system/example_empty.py +++ /dev/null @@ -1,48 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from datetime import datetime - -from airflow.models.baseoperator import chain -from airflow.models.dag import DAG -from airflow.providers.standard.operators.empty import EmptyOperator - -DAG_ID = "example_empty" - -with DAG( - dag_id=DAG_ID, - schedule="@once", - start_date=datetime(2021, 1, 1), - tags=["example"], - catchup=False, -) as dag: - task = EmptyOperator(task_id="task") - - chain(task) - - from tests_common.test_utils.watcher import watcher - - # This test needs watcher in order to properly mark success/failure - # when "tearDown" task with trigger rule is part of the DAG - list(dag.tasks) >> watcher() - - -from tests_common.test_utils.system_tests import get_test_run # noqa: E402 - -# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) -test_run = get_test_run(dag) diff --git a/providers/tests/system/microsoft/__init__.py b/providers/tests/system/microsoft/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/tests/system/microsoft/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/trino/tests/conftest.py b/providers/trino/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/trino/tests/conftest.py +++ b/providers/trino/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/trino/tests/integration/__init__.py b/providers/trino/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/trino/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/openlineage/operators/__init__.py b/providers/trino/tests/integration/trino/__init__.py similarity index 100% rename from providers/tests/integration/openlineage/operators/__init__.py rename to providers/trino/tests/integration/trino/__init__.py diff --git a/providers/tests/integration/qdrant/__init__.py b/providers/trino/tests/integration/trino/hooks/__init__.py similarity index 100% rename from providers/tests/integration/qdrant/__init__.py rename to providers/trino/tests/integration/trino/hooks/__init__.py diff --git a/providers/tests/integration/trino/hooks/test_trino.py b/providers/trino/tests/integration/trino/hooks/test_trino.py similarity index 100% rename from providers/tests/integration/trino/hooks/test_trino.py rename to providers/trino/tests/integration/trino/hooks/test_trino.py diff --git a/providers/trino/tests/system/__init__.py b/providers/trino/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/trino/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/vertica/tests/conftest.py b/providers/vertica/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/vertica/tests/conftest.py +++ b/providers/vertica/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/vertica/tests/system/__init__.py b/providers/vertica/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/vertica/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/weaviate/tests/conftest.py b/providers/weaviate/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/weaviate/tests/conftest.py +++ b/providers/weaviate/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/weaviate/tests/system/__init__.py b/providers/weaviate/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/weaviate/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/weaviate/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py b/providers/weaviate/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py index 9dce7e738361c..b3dfb52a60fb9 100644 --- a/providers/weaviate/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py +++ b/providers/weaviate/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py @@ -17,10 +17,10 @@ from __future__ import annotations import pendulum +from weaviate.collections.classes.config import Configure from airflow.decorators import dag, setup, task, teardown from airflow.providers.weaviate.operators.weaviate import WeaviateIngestOperator -from weaviate.collections.classes.config import Configure COLLECTION_NAMES = ["Weaviate_DTM_example_collection_1", "Weaviate_DTM_example_collection_2"] diff --git a/providers/weaviate/tests/system/weaviate/example_weaviate_operator.py b/providers/weaviate/tests/system/weaviate/example_weaviate_operator.py index a91b4c3fe62e2..734a7e5fa3945 100644 --- a/providers/weaviate/tests/system/weaviate/example_weaviate_operator.py +++ b/providers/weaviate/tests/system/weaviate/example_weaviate_operator.py @@ -17,14 +17,14 @@ from __future__ import annotations import pendulum +from weaviate.classes.config import DataType, Property +from weaviate.collections.classes.config import Configure from airflow.decorators import dag, task, teardown from airflow.providers.weaviate.operators.weaviate import ( WeaviateDocumentIngestOperator, WeaviateIngestOperator, ) -from weaviate.classes.config import DataType, Property -from weaviate.collections.classes.config import Configure COLLECTION_NAME = "QuestionWithoutVectorizerUsingOperator" diff --git a/providers/weaviate/tests/system/weaviate/example_weaviate_using_hook.py b/providers/weaviate/tests/system/weaviate/example_weaviate_using_hook.py index f84e967101d20..8df913e0044fd 100644 --- a/providers/weaviate/tests/system/weaviate/example_weaviate_using_hook.py +++ b/providers/weaviate/tests/system/weaviate/example_weaviate_using_hook.py @@ -17,11 +17,11 @@ from __future__ import annotations import pendulum - -from airflow.decorators import dag, task, teardown from weaviate.classes.config import DataType, Property from weaviate.collections.classes.config import Configure +from airflow.decorators import dag, task, teardown + COLLECTION_NAME = "QuestionWithOpenAIVectorizerUsingHook" diff --git a/providers/weaviate/tests/system/weaviate/example_weaviate_vectorizer_dag.py b/providers/weaviate/tests/system/weaviate/example_weaviate_vectorizer_dag.py index 36a3389b19963..17b7e25edb3cb 100644 --- a/providers/weaviate/tests/system/weaviate/example_weaviate_vectorizer_dag.py +++ b/providers/weaviate/tests/system/weaviate/example_weaviate_vectorizer_dag.py @@ -17,10 +17,10 @@ from __future__ import annotations import pendulum +from weaviate.collections.classes.config import Configure from airflow.decorators import dag, setup, task, teardown from airflow.providers.weaviate.operators.weaviate import WeaviateIngestOperator -from 
weaviate.collections.classes.config import Configure COLLECTION_NAME = "Weaviate_with_vectorizer_example_collection" diff --git a/providers/yandex/tests/conftest.py b/providers/yandex/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/yandex/tests/conftest.py +++ b/providers/yandex/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/yandex/tests/system/__init__.py b/providers/yandex/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/yandex/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/yandex/tests/system/yandex/example_yandexcloud.py b/providers/yandex/tests/system/yandex/example_yandexcloud.py index 1e1d4ae417d9a..3cb7226208baa 100644 --- a/providers/yandex/tests/system/yandex/example_yandexcloud.py +++ b/providers/yandex/tests/system/yandex/example_yandexcloud.py @@ -18,9 +18,6 @@ from datetime import datetime -from google.protobuf.json_format import MessageToDict -from yandexcloud.operations import OperationError - import yandex.cloud.dataproc.v1.cluster_pb2 as cluster_pb import yandex.cloud.dataproc.v1.cluster_service_pb2 as cluster_service_pb import yandex.cloud.dataproc.v1.cluster_service_pb2_grpc as cluster_service_grpc_pb @@ -29,6 +26,9 @@ import yandex.cloud.dataproc.v1.job_service_pb2 as job_service_pb import yandex.cloud.dataproc.v1.job_service_pb2_grpc as job_service_grpc_pb import yandex.cloud.dataproc.v1.subcluster_pb2 as subcluster_pb +from google.protobuf.json_format import MessageToDict +from yandexcloud.operations import OperationError + from airflow import DAG from airflow.decorators import task from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook diff --git a/providers/ydb/tests/conftest.py b/providers/ydb/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/ydb/tests/conftest.py +++ b/providers/ydb/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. 
from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/ydb/tests/integration/__init__.py b/providers/ydb/tests/integration/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/ydb/tests/integration/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/integration/qdrant/hooks/__init__.py b/providers/ydb/tests/integration/ydb/__init__.py similarity index 100% rename from providers/tests/integration/qdrant/hooks/__init__.py rename to providers/ydb/tests/integration/ydb/__init__.py diff --git a/providers/tests/integration/qdrant/operators/__init__.py b/providers/ydb/tests/integration/ydb/hooks/__init__.py similarity index 100% rename from providers/tests/integration/qdrant/operators/__init__.py rename to providers/ydb/tests/integration/ydb/hooks/__init__.py diff --git a/providers/tests/integration/redis/__init__.py b/providers/ydb/tests/integration/ydb/operators/__init__.py similarity index 100% rename from providers/tests/integration/redis/__init__.py rename to providers/ydb/tests/integration/ydb/operators/__init__.py diff --git a/providers/tests/integration/ydb/operators/test_ydb.py b/providers/ydb/tests/integration/ydb/operators/test_ydb.py similarity index 100% rename from providers/tests/integration/ydb/operators/test_ydb.py rename to providers/ydb/tests/integration/ydb/operators/test_ydb.py diff --git a/providers/ydb/tests/system/__init__.py b/providers/ydb/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/ydb/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/ydb/tests/system/ydb/example_ydb.py b/providers/ydb/tests/system/ydb/example_ydb.py index 61f7373c7229d..cd2d4835d49a6 100644 --- a/providers/ydb/tests/system/ydb/example_ydb.py +++ b/providers/ydb/tests/system/ydb/example_ydb.py @@ -20,6 +20,7 @@ import os import ydb + from airflow import DAG from airflow.decorators import task from airflow.providers.ydb.hooks.ydb import YDBHook diff --git a/providers/zendesk/tests/conftest.py b/providers/zendesk/tests/conftest.py index 068fe6bbf5ae9..f56ccce0a3f69 100644 --- a/providers/zendesk/tests/conftest.py +++ b/providers/zendesk/tests/conftest.py @@ -16,17 +16,4 @@ # under the License. from __future__ import annotations -import pathlib - -import pytest - pytest_plugins = "tests_common.pytest_plugin" - - -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") - dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) diff --git a/providers/zendesk/tests/system/__init__.py b/providers/zendesk/tests/system/__init__.py new file mode 100644 index 0000000000000..e8fd22856438c --- /dev/null +++ b/providers/zendesk/tests/system/__init__.py @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/pyproject.toml b/pyproject.toml index fadc418a9856d..52b5827ae8365 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -328,12 +328,8 @@ testing = ["dev", "providers.tests", "task_sdk.tests", "tests_common", "tests"] # Those are needed so that __init__.py chaining of packages properly works for IDEs # the first non-comment line of such empty __init__.py files should be: # __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore -"providers/src/airflow/providers/__init__.py" = ["I002"] -"providers/src/airflow/__init__.py" = ["I002"] "providers/*/tests/*/__init__.py" = ["I002"] "providers/*/*/tests/*/*/__init__.py" = ["I002"] -"providers/tests/__init__.py" = ["I002"] -"providers/__init__.py" = ["I002"] # The test_python.py is needed because adding __future__.annotations breaks runtime checks that are # needed for the test to work @@ -377,6 +373,9 @@ testing = ["dev", "providers.tests", "task_sdk.tests", "tests_common", "tests"] "docker_tests/*" = ["D", "TID253", "S101", "TRY002"] "kubernetes_tests/*" = ["D", "TID253", "S101", "TRY002"] "helm_tests/*" = ["D", "TID253", "S101", "TRY002"] +"providers/**/tests/*" = ["D", "TID253", "S101", "TRY002"] +# exclude assert in edge provider +"providers/edge/src/airflow/providers/edge/example_dags/integration_test.py" = ["S101"] # All of the modules which have an extra license header (i.e. that we copy from another project) need to # ignore E402 -- module level import not at top level @@ -403,6 +402,10 @@ testing = ["dev", "providers.tests", "task_sdk.tests", "tests_common", "tests"] "providers/qdrant/tests/provider_tests/qdrant/hooks/test_qdrant.py" = ["E402"] "providers/qdrant/tests/provider_tests/qdrant/operators/test_qdrant.py" = ["E402"] +# Remove some docstring rules from files that do not need docstrings +"providers/apache/hdfs/src/airflow/providers/apache/hdfs/sensors/hdfs.py" = ["D101"] + + # All the modules which do not follow B028 yet: https://docs.astral.sh/ruff/rules/no-explicit-stacklevel/ "helm_tests/airflow_aux/test_basic_helm_chart.py" = ["B028"] @@ -411,6 +414,10 @@ testing = ["dev", "providers.tests", "task_sdk.tests", "tests_common", "tests"] "tests_common/test_utils/compat.py" = ["TID251", "F401"] "tests_common/pytest_plugin.py" = ["F811"] +# While pandas import is banned, sql.pyi should be excluded from it as it does not have a TYPE_CHECKING +# mechanism and whole .pyi is really "type-checking" only +"providers/common/sql/src/airflow/providers/common/sql/hooks/sql.pyi" = ["TID253"] + [tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. ban-relative-imports = "all" @@ -589,14 +596,12 @@ disable_error_code = [ "annotation-unchecked", ] # Since there are no __init__.py files in -# providers/src/apache/airflow/providers we need to tell MyPy where the "base" +# providers/src/apache/airflow/providers we need to tell MyPy where the "base" # is, otherwise when it sees -# providers/src/apache/airflow/providers/redis/__init__.py, it thinks this is -# the toplevel `redis` folder. 
+# TODO(potiuk): check if those providers/*/src are still needed explicit_package_bases = true mypy_path = [ "$MYPY_CONFIG_FILE_DIR", - "$MYPY_CONFIG_FILE_DIR/providers/src", "$MYPY_CONFIG_FILE_DIR/task_sdk/src", "$MYPY_CONFIG_FILE_DIR/providers/airbyte/src", "$MYPY_CONFIG_FILE_DIR/providers/celery/src", @@ -636,8 +641,7 @@ ignore_errors = true [dependency-groups] dev = [ - # TODO(potiuk): remove me when all providers are moved to new structure - "local-providers", + "apache-airflow", "apache-airflow-providers-airbyte", "apache-airflow-providers-alibaba", "apache-airflow-providers-amazon", @@ -737,7 +741,7 @@ dev = [ [tool.uv.sources] # These names must match the names as defined in the pyproject.toml of the workspace items, # *not* the workspace folder paths -local-providers = { workspace = true } +apache-airflow = {workspace = true} apache-airflow-providers-airbyte = {workspace = true} apache-airflow-providers-alibaba = { workspace = true } apache-airflow-providers-amazon = { workspace = true } @@ -835,7 +839,6 @@ apache-airflow-task-sdk = { workspace = true } [tool.uv.workspace] members = [ - "providers", "providers/airbyte", "providers/alibaba", "providers/amazon", @@ -929,5 +932,6 @@ members = [ "providers/yandex", "providers/ydb", "providers/zendesk", + ".", "task_sdk", ] diff --git a/scripts/ci/docker-compose/remove-sources.yml b/scripts/ci/docker-compose/remove-sources.yml index 51ed19d52cd7c..51672071c897d 100644 --- a/scripts/ci/docker-compose/remove-sources.yml +++ b/scripts/ci/docker-compose/remove-sources.yml @@ -28,10 +28,6 @@ services: - type: bind source: ./empty target: /opt/airflow/task_sdk - # Removes providers sources from container (but not tests) - - type: bind - source: ./empty - target: /opt/airflow/providers/src # Remove all provider sources from container # START automatically generated volumes by generate-volumes-for-sources pre-commit - ../../../empty:/opt/airflow/providers/airbyte/src diff --git a/scripts/ci/kubernetes/k8s_requirements.txt b/scripts/ci/kubernetes/k8s_requirements.txt index 7fd367c2e014b..77d207026f8ff 100644 --- a/scripts/ci/kubernetes/k8s_requirements.txt +++ b/scripts/ci/kubernetes/k8s_requirements.txt @@ -1,5 +1,4 @@ -e .[devel-devscripts,devel-tests,cncf.kubernetes,sqlite] -e ./providers/standard --e ./providers -e ./task_sdk -e ./providers/cncf/kubernetes diff --git a/scripts/ci/pre_commit/check_imports_in_providers.py b/scripts/ci/pre_commit/check_imports_in_providers.py index a1fc17104a2bd..68c41066b74e1 100755 --- a/scripts/ci/pre_commit/check_imports_in_providers.py +++ b/scripts/ci/pre_commit/check_imports_in_providers.py @@ -61,10 +61,6 @@ def check_imports(folders_to_check: list[Path]): console.print(importing_file_path) imported_files_array = import_tree.get(importing_file, None) if imported_files_array is None: - if importing_file != "providers/src/airflow/providers/__init__.py": - # providers/__init__.py should be ignored - console.print(f"[red]The file {importing_file} is not discovered by ruff analyze!") - errors_found = True continue imported_file_paths = [Path(file) for file in imported_files_array] for imported_file_path in imported_file_paths: diff --git a/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py b/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py index 649e45a2e878d..1ece1581bade7 100755 --- a/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py +++ b/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py @@ -21,7 +21,9 @@ import sys from pathlib import 
Path -ROOT_DIR = Path(__file__).parents[3].resolve() +sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_precommit_utils is imported +from common_precommit_utils import AIRFLOW_PROVIDERS_ROOT_PATH, AIRFLOW_SOURCES_ROOT_PATH, console + ACCEPTED_NON_INIT_DIRS = [ "adr", "doc", @@ -30,40 +32,119 @@ "static", ] +PATH_EXTENSION_STRING = '__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore' + +# Here we should add the second level paths that we want to have sub-packages in +KNOWN_SECOND_LEVEL_PATHS = ["apache", "atlassian", "common", "cncf", "dbt", "microsoft"] + +ALLOWED_SUB_FOLDERS_OF_TESTS = ["provider_tests", "system", "integration"] + +fail_pre_commit = False +fatal_error = False +missing_init_dirs: list[Path] = [] +missing_path_extension_dirs: list[Path] = [] + + +def _what_kind_of_test_init_py_needed(base_path: Path, folder: Path) -> tuple[bool, bool]: + """Returns a tuple of two booleans indicating need and type of __init__.py file. -def check_dir_init_file(folders: list[Path]) -> None: - missing_init_dirs: list[Path] = [] + The first boolean is True if __init__.py is needed, False otherwise. + The second boolean is True if the folder needs path extension (i.e. if we expect that other packages + have the same folder to import things from this folder), False otherwise. + """ + depth = len(folder.relative_to(base_path).parts) + if depth == 0: + # this is the "tests" folder itself + return False, False + if depth == 1: + # this is one of "provider_tests", "system", "integration" folder + if folder.name not in ALLOWED_SUB_FOLDERS_OF_TESTS: + console.print(f"[red]Unexpected folder {folder} in {base_path}[/]") + console.print(f"[yellow]Only {ALLOWED_SUB_FOLDERS_OF_TESTS} should be sub-folders of tests.[/]") + global fail_pre_commit + global fatal_error + fail_pre_commit = True + fatal_error = True + return False, False + if depth == 2: + # For known sub-packages that can occur in several packages we need to add __path__ extension + return True, folder.name in KNOWN_SECOND_LEVEL_PATHS + # all other sub-packages should have plain __init__.py + return True, False + + +def check_dir_init_test_folders(folders: list[Path]) -> None: + global fail_pre_commit folders = list(folders) - for path in folders: - for root, dirs, files in os.walk(path): + for root_distribution_path in folders: + # We need init folders for all folders and for the common ones we need path extension + tests_folder = root_distribution_path / "tests" + print("Checking for __init__.py files in distribution for tests: ", tests_folder) + for root, dirs, _ in os.walk(tests_folder): # Edit it in place, so we don't recurse to folders we don't care about dirs[:] = [d for d in dirs if d not in ACCEPTED_NON_INIT_DIRS] + need_init_py, need_path_extension = _what_kind_of_test_init_py_needed(tests_folder, Path(root)) + if need_init_py: + init_py_file = Path(root).joinpath("__init__.py") + if not init_py_file.exists(): + missing_init_dirs.append(Path(root)) + console.print(f"Missing __init__.py file {init_py_file}") + if need_path_extension: + missing_path_extension_dirs.append(Path(root)) + console.print(f"Missing path extension in: {init_py_file}") + elif need_path_extension: + text = init_py_file.read_text() + if PATH_EXTENSION_STRING not in text: + missing_path_extension_dirs.append(Path(root)) + console.print(f"Missing path extension in existing {init_py_file}") - if "__init__.py" in files: - continue - missing_init_dirs.append(Path(root)) +def check_dir_init_src_folders(folders: 
list[Path]) -> None: + global fail_pre_commit + folders = list(folders) + for root_distribution_path in folders: + distribution_relative_path = root_distribution_path.relative_to(AIRFLOW_PROVIDERS_ROOT_PATH) + # We need init folders for all folders and for the common ones we need path extension + provider_source_folder = root_distribution_path / "src" / distribution_relative_path + print("Checking for __init__.py files in distribution for src: ", provider_source_folder) + for root, dirs, _ in os.walk(provider_source_folder): + # Edit it in place, so we don't recurse to folders we don't care about + dirs[:] = [d for d in dirs if d not in ACCEPTED_NON_INIT_DIRS] + init_py_file = Path(root).joinpath("__init__.py") + if not init_py_file.exists(): + missing_init_dirs.append(Path(root)) + console.print(f"Missing __init__.py file {init_py_file}") + + +if __name__ == "__main__": + providers_distributions = sorted( + map(lambda f: f.parent, AIRFLOW_PROVIDERS_ROOT_PATH.rglob("provider.yaml")) + ) + check_dir_init_test_folders(providers_distributions) + check_dir_init_src_folders(providers_distributions) if missing_init_dirs: - with ROOT_DIR.joinpath("scripts/ci/license-templates/LICENSE.txt").open() as license: + with AIRFLOW_SOURCES_ROOT_PATH.joinpath("scripts/ci/license-templates/LICENSE.txt").open() as license: license_txt = license.readlines() prefixed_licensed_txt = [f"# {line}" if line != "\n" else "#\n" for line in license_txt] - for missing_init_dir in missing_init_dirs: - (missing_init_dir / "__init__.py").write_text("".join(prefixed_licensed_txt)) - - print("No __init__.py file was found in the following provider directories:") - print("\n".join([missing_init_dir.as_posix() for missing_init_dir in missing_init_dirs])) - print("\nThe missing __init__.py files have been created. Please add these new files to a commit.") - sys.exit(1) - + init_file = missing_init_dir / "__init__.py" + init_file.write_text("".join(prefixed_licensed_txt)) + console.print(f"[yellow]Added missing __init__.py file:[/] {init_file}") + fail_pre_commit = True -if __name__ == "__main__": - providers_root = Path(f"{ROOT_DIR}/providers") - providers_ns = providers_root.joinpath("src", "airflow", "providers") - providers_tests = providers_root.joinpath("tests") + for missing_extension_dir in missing_path_extension_dirs: + init_file = missing_extension_dir / "__init__.py" + init_file.write_text(init_file.read_text() + PATH_EXTENSION_STRING + "\n") + console.print(f"[yellow]Added missing path extension to __init__.py file[/] {init_file}") + fail_pre_commit = True - providers_pkgs = sorted(map(lambda f: f.parent, providers_ns.rglob("provider.yaml"))) - check_dir_init_file(providers_pkgs) - - check_dir_init_file([providers_root / "tests"]) + if fail_pre_commit: + console.print( + "\n[yellow]The missing __init__.py files have been created. " + "Please add these new files to a commit." 
+ ) + if fatal_error: + console.print("[red]Also please remove the extra test folders listed above!") + sys.exit(1) + console.print("[green]All __init__.py files are present and have necessary extensions.[/]") diff --git a/scripts/ci/pre_commit/mypy_folder.py b/scripts/ci/pre_commit/mypy_folder.py index 30b7809b2c214..7bf7826de0f86 100755 --- a/scripts/ci/pre_commit/mypy_folder.py +++ b/scripts/ci/pre_commit/mypy_folder.py @@ -35,7 +35,6 @@ ALLOWED_FOLDERS = [ "airflow", - "providers/src/airflow/providers", *[f"providers/{provider_id.replace('.', '/')}/src" for provider_id in get_all_new_provider_ids()], "dev", "docs", @@ -68,9 +67,6 @@ arguments.append(f"providers/{provider_id.replace('.', '/')}/src") arguments.append(f"providers/{provider_id.replace('.', '/')}/tests") namespace_packages = True - if mypy_folder == "providers/src/airflow/providers": - arguments.append("providers/tests") - namespace_packages = True elif mypy_folder.startswith("providers/"): arguments.append(f"{Path(mypy_folder).parent.as_posix()}/tests") namespace_packages = True diff --git a/scripts/ci/pre_commit/update_example_dags_paths.py b/scripts/ci/pre_commit/update_example_dags_paths.py index 17d2a2ccea453..b0d9e1ca81d08 100755 --- a/scripts/ci/pre_commit/update_example_dags_paths.py +++ b/scripts/ci/pre_commit/update_example_dags_paths.py @@ -69,7 +69,7 @@ def replace_match(file: Path, line: str) -> str | None: provider, version = get_provider_and_version(url_path_to_dir) proper_system_tests_url = ( f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/providers/tests/system/{url_path_to_dir}" + f"/providers/{provider.replace('.','/')}/tests/system/{url_path_to_dir}" ) if not example_dags_folder.exists(): if proper_system_tests_url in file.read_text(): diff --git a/scripts/ci/pre_commit/update_providers_build_files.py b/scripts/ci/pre_commit/update_providers_build_files.py index 203eae35c7115..9d1abc910a5cb 100755 --- a/scripts/ci/pre_commit/update_providers_build_files.py +++ b/scripts/ci/pre_commit/update_providers_build_files.py @@ -32,33 +32,8 @@ console.print(f"[bright_blue]Determining providers to regenerate from: {file_list}\n") -# TODO: remove it when we move all providers to the new structure -def _find_old_providers_structure() -> None: - console.print(f"[bright_blue]Looking at {examined_file} for old structure provider.yaml") - # find the folder where provider.yaml is - for parent in Path(examined_file).parents: - console.print(f"[bright_blue]Checking {parent}") - if (parent / "provider.yaml").exists(): - provider_folder = parent - break - else: - console.print(f"[yellow]\nCould not find `provider.yaml` in any parent of {examined_file}[/]") - return - # find base for the provider sources - for parent in provider_folder.parents: - if parent.name == "providers": - base_folder = parent - console.print(f"[bright_blue]Found base folder {base_folder}") - break - else: - console.print(f"[red]\nCould not find old structure base folder for {provider_folder}") - sys.exit(1) - provider_name = ".".join(provider_folder.relative_to(base_folder).as_posix().split("/")) - providers.add(provider_name) - - # TODO(potiuk) - rename when all providers are new-style -def _find_new_providers_structure() -> None: +def _find_new_providers_structure(examined_file: Path) -> None: console.print(f"[bright_blue]Looking at {examined_file} for new structure provider.yaml") # find the folder where provider.yaml is for parent in Path(examined_file).parents: @@ -85,10 +60,7 @@ def _find_new_providers_structure() -> 
None: # get all folders from arguments for examined_file in file_list: - if not examined_file.startswith("providers/src"): - _find_new_providers_structure() - else: - _find_old_providers_structure() + _find_new_providers_structure(Path(examined_file)) console.print(f"[bright_blue]Regenerating build files for providers: {providers}[/]") diff --git a/scripts/ci/testing/run_unit_tests.sh b/scripts/ci/testing/run_unit_tests.sh index 75b8830b2558d..b602237ba7e52 100755 --- a/scripts/ci/testing/run_unit_tests.sh +++ b/scripts/ci/testing/run_unit_tests.sh @@ -105,11 +105,6 @@ function providers_tests() { breeze testing providers-tests --collect-only --remove-arm-packages --test-type "All" --no-db-reset RESULT=$? set +x - elif [[ "${TEST_SCOPE}" == "System" ]]; then - set -x - breeze testing system-tests providers/tests/system/example_empty.py - RESULT=$? - set +x else echo "Unknown test scope: ${TEST_SCOPE}" set -e diff --git a/scripts/in_container/run_mypy.sh b/scripts/in_container/run_mypy.sh index 976963e444a05..9552b7be4cdd9 100755 --- a/scripts/in_container/run_mypy.sh +++ b/scripts/in_container/run_mypy.sh @@ -30,8 +30,8 @@ then for folder in ${SUSPENDED_PROVIDERS_FOLDERS=} do ADDITIONAL_MYPY_OPTIONS+=( - "--exclude" "providers/src/airflow/providers/${folder}/*" - "--exclude" "providers/tests/${folder}/*" + "--exclude" "providers/${folder}/src/airflow/providers/${folder}/*" + "--exclude" "providers/${folder}/tests/${folder}/*" ) done fi diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py index 7e7930d832c5a..5dddba1c8515f 100755 --- a/scripts/in_container/run_provider_yaml_files_check.py +++ b/scripts/in_container/run_provider_yaml_files_check.py @@ -131,7 +131,7 @@ def _load_new_schema() -> dict[str, Any]: def _load_package_data(package_paths: Iterable[str]): - schema = _load_schema() + # TODO(potiuk): rename me new_schema = _load_new_schema() result = {} for provider_yaml_path in package_paths: @@ -139,10 +139,7 @@ def _load_package_data(package_paths: Iterable[str]): provider = yaml.load(yaml_file, SafeLoader) rel_path = pathlib.Path(provider_yaml_path).relative_to(ROOT_DIR).as_posix() try: - if "providers/src" in provider_yaml_path: - jsonschema.validate(provider, schema=schema) - else: - jsonschema.validate(provider, schema=new_schema) + jsonschema.validate(provider, schema=new_schema) except jsonschema.ValidationError as ex: msg = f"Unable to parse: {provider_yaml_path}. 
Original error {type(ex).__name__}: {ex}" raise RuntimeError(msg) diff --git a/tests/always/test_example_dags.py b/tests/always/test_example_dags.py index e52b646991cde..c899d6230bab3 100644 --- a/tests/always/test_example_dags.py +++ b/tests/always/test_example_dags.py @@ -91,6 +91,7 @@ def get_suspended_providers_folders() -> list[str]: suspended_providers.append( provider_path.parent.relative_to(AIRFLOW_SOURCES_ROOT) .as_posix() + # TODO(potiuk): check .replace("providers/src/airflow/providers/", "") ) return suspended_providers @@ -109,6 +110,7 @@ def get_python_excluded_providers_folders() -> list[str]: excluded_providers.append( provider_path.parent.relative_to(AIRFLOW_SOURCES_ROOT) .as_posix() + # TODO(potiuk): check .replace("providers/src/airflow/providers/", "") ) return excluded_providers diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py index 344d29775874c..f413917486295 100644 --- a/tests/always/test_project_structure.py +++ b/tests/always/test_project_structure.py @@ -20,25 +20,22 @@ import glob import itertools import mmap -import os import pathlib import pytest -ROOT_FOLDER = pathlib.Path(__file__).parents[2] -PROVIDERS_SRC = ROOT_FOLDER.joinpath("providers", "src") -PROVIDERS_TESTS = ROOT_FOLDER.joinpath("providers", "tests") +AIRFLOW_SOURCES_ROOT = pathlib.Path(__file__).parents[2] -NEW_PROVIDER_SRC = ROOT_FOLDER.joinpath("providers") +NEW_PROVIDER_SRC = AIRFLOW_SOURCES_ROOT.joinpath("providers") class TestProjectStructure: def test_reference_to_providers_from_core(self): - for filename in ROOT_FOLDER.glob("example_dags/**/*.py"): + for filename in AIRFLOW_SOURCES_ROOT.glob("example_dags/**/*.py"): self.assert_file_not_contains(filename, "providers") def test_deprecated_packages(self): - for filename in ROOT_FOLDER.glob("airflow/contrib/**/*.py"): + for filename in AIRFLOW_SOURCES_ROOT.glob("airflow/contrib/**/*.py"): if filename.name == "__init__.py": self.assert_file_contains(filename, "This package is deprecated.") else: @@ -56,48 +53,86 @@ def assert_file_contains(self, filename: pathlib.Path, pattern: str): def test_providers_modules_should_have_tests(self): """ - Assert every module in /providers/src/airflow/providers has a corresponding test_ file in providers/providers. + Assert every module in providers/*/src/airflow/providers has a corresponding test_ file in the provider's tests/provider_tests tree.
""" # The test below had a but for quite a while and we missed a lot of modules to have tess # We should make sure that one goes to 0 + # TODO(potiuk) - check if that test actually tests something OVERLOOKED_TESTS = [ - "providers/tests/amazon/aws/executors/batch/test_boto_schema.py", - "providers/tests/amazon/aws/executors/batch/test_batch_executor_config.py", - "providers/tests/amazon/aws/executors/batch/test_utils.py", - "providers/tests/amazon/aws/executors/ecs/test_boto_schema.py", - "providers/tests/amazon/aws/executors/ecs/test_ecs_executor_config.py", - "providers/tests/amazon/aws/executors/ecs/test_utils.py", - "providers/tests/amazon/aws/executors/utils/test_base_config_keys.py", - "providers/tests/amazon/aws/operators/test_emr.py", - "providers/tests/amazon/aws/operators/test_sagemaker.py", - "providers/tests/amazon/aws/sensors/test_emr.py", - "providers/tests/amazon/aws/sensors/test_sagemaker.py", - "providers/tests/amazon/aws/test_exceptions.py", - "providers/tests/amazon/aws/triggers/test_step_function.py", - "providers/tests/amazon/aws/utils/test_rds.py", - "providers/tests/amazon/aws/utils/test_sagemaker.py", - "providers/tests/amazon/aws/waiters/test_base_waiter.py", - "providers/tests/apache/drill/operators/test_drill.py", - "providers/tests/apache/druid/operators/test_druid_check.py", - "providers/tests/apache/hdfs/hooks/test_hdfs.py", - "providers/tests/apache/hdfs/log/test_hdfs_task_handler.py", - "providers/tests/apache/hdfs/sensors/test_hdfs.py", - "providers/tests/apache/hive/plugins/test_hive.py", - "providers/tests/celery/executors/test_celery_executor_utils.py", - "providers/tests/celery/executors/test_default_celery.py", - "providers/tests/cloudant/test_cloudant_fake.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/batch/test_batch_executor_config.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/batch/test_boto_schema.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/batch/test_utils.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/test_boto_schema.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/test_ecs_executor_config.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/test_utils.py", + "providers/amazon/tests/provider_tests/amazon/aws/executors/utils/test_base_config_keys.py", + "providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr.py", + "providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker.py", + "providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr.py", + "providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker.py", + "providers/amazon/tests/provider_tests/amazon/aws/test_exceptions.py", + "providers/amazon/tests/provider_tests/amazon/aws/triggers/test_step_function.py", + "providers/amazon/tests/provider_tests/amazon/aws/utils/test_rds.py", + "providers/amazon/tests/provider_tests/amazon/aws/utils/test_sagemaker.py", + "providers/amazon/tests/provider_tests/amazon/aws/waiters/test_base_waiter.py", + "providers/amazon/tests/provider_tests/amazon/test_version_compat.py", + "providers/apache/hdfs/tests/provider_tests/apache/hdfs/hooks/test_hdfs.py", + "providers/apache/hdfs/tests/provider_tests/apache/hdfs/log/test_hdfs_task_handler.py", + "providers/apache/hdfs/tests/provider_tests/apache/hdfs/sensors/test_hdfs.py", + "providers/apache/hive/tests/provider_tests/apache/hive/plugins/test_hive.py", + 
"providers/celery/tests/provider_tests/celery/executors/test_celery_executor_utils.py", + "providers/celery/tests/provider_tests/celery/executors/test_default_celery.py", + "providers/celery/tests/provider_tests/celery/test_version_compat.py", + "providers/cloudant/tests/provider_tests/cloudant/test_cloudant_fake.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/executors/test_kubernetes_executor_types.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/executors/test_kubernetes_executor_utils.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/operators/test_kubernetes_pod.py", + "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_exceptions.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_k8s_model.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_kube_client.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_kube_config.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_python_kubernetes_script.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_secret.py", + "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/test_version_compat.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/triggers/test_kubernetes_pod.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/utils/test_delete_from.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/utils/test_k8s_hashlib_wrapper.py", "providers/cncf/kubernetes/tests/provider_tests/cncf/kubernetes/utils/test_xcom_sidecar.py", + "providers/common/compat/tests/provider_tests/common/compat/lineage/test_entities.py", + "providers/common/compat/tests/provider_tests/common/compat/standard/test_operators.py", + "providers/common/compat/tests/provider_tests/common/compat/standard/test_triggers.py", + "providers/common/compat/tests/provider_tests/common/compat/standard/test_utils.py", + "providers/common/compat/tests/provider_tests/common/compat/test_version_compat.py", + "providers/common/io/tests/provider_tests/common/io/test_version_compat.py", + "providers/edge/tests/provider_tests/edge/models/test_edge_job.py", + "providers/edge/tests/provider_tests/edge/models/test_edge_logs.py", + "providers/edge/tests/provider_tests/edge/models/test_edge_worker.py", + "providers/edge/tests/provider_tests/edge/test_version_compat.py", + "providers/edge/tests/provider_tests/edge/worker_api/routes/test__v2_compat.py", + "providers/edge/tests/provider_tests/edge/worker_api/routes/test__v2_routes.py", + "providers/edge/tests/provider_tests/edge/worker_api/routes/test_jobs.py", + "providers/edge/tests/provider_tests/edge/worker_api/test_app.py", + "providers/edge/tests/provider_tests/edge/worker_api/test_auth.py", + "providers/edge/tests/provider_tests/edge/worker_api/test_datamodels.py", + "providers/elasticsearch/tests/provider_tests/elasticsearch/test_version_compat.py", + "providers/fab/tests/provider_tests/fab/migrations/test_env.py", + "providers/fab/tests/provider_tests/fab/www/api_connexion/test_exceptions.py", + "providers/fab/tests/provider_tests/fab/www/api_connexion/test_parameters.py", + "providers/fab/tests/provider_tests/fab/www/api_connexion/test_security.py", + "providers/fab/tests/provider_tests/fab/www/api_connexion/test_types.py", + "providers/fab/tests/provider_tests/fab/www/extensions/test_init_appbuilder.py", + "providers/fab/tests/provider_tests/fab/www/extensions/test_init_jinja_globals.py", + 
"providers/fab/tests/provider_tests/fab/www/extensions/test_init_manifest_files.py", + "providers/fab/tests/provider_tests/fab/www/extensions/test_init_security.py", + "providers/fab/tests/provider_tests/fab/www/extensions/test_init_session.py", + "providers/fab/tests/provider_tests/fab/www/extensions/test_init_views.py", + "providers/fab/tests/provider_tests/fab/www/security/test_permissions.py", + "providers/fab/tests/provider_tests/fab/www/test_app.py", + "providers/fab/tests/provider_tests/fab/www/test_constants.py", + "providers/fab/tests/provider_tests/fab/www/test_security_manager.py", + "providers/fab/tests/provider_tests/fab/www/test_session.py", + "providers/fab/tests/provider_tests/fab/www/test_utils.py", + "providers/fab/tests/provider_tests/fab/www/test_views.py", "providers/google/tests/provider_tests/google/cloud/fs/test_gcs.py", "providers/google/tests/provider_tests/google/cloud/links/test_automl.py", "providers/google/tests/provider_tests/google/cloud/links/test_base.py", @@ -130,10 +165,12 @@ def test_providers_modules_should_have_tests(self): "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_auto_ml.py", "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_batch_prediction_job.py", "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_custom_job.py", + "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_dataset.py", "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_endpoint_service.py", "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_hyperparameter_tuning_job.py", "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_model_service.py", "providers/google/tests/provider_tests/google/cloud/operators/vertex_ai/test_pipeline_job.py", + "providers/google/tests/provider_tests/google/cloud/sensors/vertex_ai/test_feature_store.py", "providers/google/tests/provider_tests/google/cloud/transfers/test_bigquery_to_sql.py", "providers/google/tests/provider_tests/google/cloud/transfers/test_presto_to_gcs.py", "providers/google/tests/provider_tests/google/cloud/utils/test_bigquery.py", @@ -142,46 +179,70 @@ def test_providers_modules_should_have_tests(self): "providers/google/tests/provider_tests/google/common/links/test_storage.py", "providers/google/tests/provider_tests/google/common/test_consts.py", "providers/google/tests/provider_tests/google/test_go_module_utils.py", - "providers/tests/microsoft/azure/operators/test_adls.py", - "providers/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py", - "providers/tests/slack/notifications/test_slack_notifier.py", - "providers/tests/snowflake/triggers/test_snowflake_trigger.py", - "providers/tests/yandex/hooks/test_yandexcloud_dataproc.py", - "providers/tests/yandex/operators/test_yandexcloud_dataproc.py", - "providers/tests/fab/migrations/test_env.py", + "providers/google/tests/provider_tests/google/test_version_compat.py", + "providers/http/tests/provider_tests/http/test_exceptions.py", + "providers/microsoft/azure/tests/provider_tests/microsoft/azure/operators/test_adls.py", + "providers/openlineage/tests/provider_tests/openlineage/test_version_compat.py", + "providers/opensearch/tests/provider_tests/opensearch/test_version_compat.py", + "providers/presto/tests/provider_tests/presto/test_version_compat.py", + "providers/snowflake/tests/provider_tests/snowflake/triggers/test_snowflake_trigger.py", + 
"providers/standard/tests/provider_tests/standard/operators/test_empty.py", + "providers/standard/tests/provider_tests/standard/operators/test_latest_only.py", + "providers/standard/tests/provider_tests/standard/operators/test_trigger_dagrun.py", + "providers/standard/tests/provider_tests/standard/sensors/test_external_task.py", + "providers/standard/tests/provider_tests/standard/sensors/test_filesystem.py", + "providers/standard/tests/provider_tests/standard/test_version_compat.py", + "providers/standard/tests/provider_tests/standard/utils/test_sensor_helper.py", + "providers/trino/tests/provider_tests/trino/test_version_compat.py", ] - - # TODO: Should we extend this test to cover other directories? - modules_files = PROVIDERS_SRC.joinpath("airflow", "providers").glob("**/*.py") - + modules_files: list[pathlib.Path] = list( + AIRFLOW_SOURCES_ROOT.glob("providers/**/src/airflow/providers/**/*.py") + ) + # Exclude .build files + modules_files = (f for f in modules_files if ".build" not in f.parts) + # Exclude .git files + modules_files = (f for f in modules_files if ".git" not in f.parts) + # Exclude .venv files + modules_files = (f for f in modules_files if ".venv" not in f.parts) # Exclude __init__.py modules_files = filter(lambda f: f.name != "__init__.py", modules_files) - # Make path relative - modules_files = map(lambda f: f.relative_to(PROVIDERS_SRC / "airflow" / "providers"), modules_files) # Exclude example_dags modules_files = (f for f in modules_files if "example_dags" not in f.parts) # Exclude _vendor modules_files = (f for f in modules_files if "_vendor" not in f.parts) # Exclude versions file modules_files = (f for f in modules_files if "versions" not in f.parts) - # Change src/airflow/providers/ to tests/ - test_folder = pathlib.Path("providers/tests") - # Add test_ prefix to filename - expected_test_files = (test_folder.joinpath(f.with_name("test_" + f.name)) for f in modules_files) - - current_test_files = PROVIDERS_TESTS.glob("**/*.py") + # Exclude get_provider_info files + modules_files = (f for f in modules_files if "get_provider_info.py" not in f.parts) # Make path relative - current_test_files = (os.path.relpath(f, ROOT_FOLDER) for f in current_test_files) + modules_files = list(f.relative_to(AIRFLOW_SOURCES_ROOT) for f in modules_files) + current_test_files = list(NEW_PROVIDER_SRC.rglob("**/tests/**/*.py")) + # Make path relative + current_test_files = list(f.relative_to(AIRFLOW_SOURCES_ROOT) for f in current_test_files) # Exclude __init__.py - current_test_files = (f for f in current_test_files if not f.endswith("__init__.py")) - - modules_files = set(modules_files) - expected_test_files = set(expected_test_files) - set(OVERLOOKED_TESTS) - current_test_files = set(current_test_files) + current_test_files = set(f for f in current_test_files if not f.name == "__init__.py") + + modules_files_set = set(modules_files) + expected_test_files = set( + [ + pathlib.Path( + f.with_name("test_" + f.name) + .as_posix() + .replace("/src/airflow/providers/", "/tests/provider_tests/") + ) + for f in modules_files_set + ] + ) + expected_test_files = set(expected_test_files) - set( + [pathlib.Path(test_file) for test_file in OVERLOOKED_TESTS] + ) - missing_tests_files = expected_test_files - expected_test_files.intersection(current_test_files) + missing_tests_files = [ + file.as_posix() + for file in sorted(expected_test_files - expected_test_files.intersection(current_test_files)) + ] - assert set() == missing_tests_files, "Detect missing tests in providers module - please add 
tests" + assert missing_tests_files == [], "Detect missing tests in providers module - please add tests" added_test_files = current_test_files.intersection(OVERLOOKED_TESTS) assert set() == added_test_files, ( @@ -205,7 +266,7 @@ def get_imports_from_file(filepath: str): return import_names -def filepath_to_module(path: pathlib.Path, src_folder: pathlib.Path = PROVIDERS_SRC): +def filepath_to_module(path: pathlib.Path, src_folder: pathlib.Path): path = path.relative_to(src_folder) return path.as_posix().replace("/", ".")[: -(len(".py"))] @@ -222,7 +283,7 @@ class ProjectStructureTest: def class_paths(self): for resource_type in self.CLASS_DIRS: - python_files = PROVIDERS_SRC.glob( + python_files = AIRFLOW_SOURCES_ROOT.glob( f"airflow/providers/{self.PROVIDER}/**/{resource_type}/**/*.py", ) # Make path relative @@ -240,17 +301,12 @@ def new_class_paths(self): def list_of_classes(self): classes = {} - for operator_file in self.class_paths(): - operators_paths = self.get_classes_from_file(operator_file, PROVIDERS_SRC) - classes.update(operators_paths) - for operator_file in self.new_class_paths(): - operators_paths = self.get_classes_from_file(operator_file, NEW_PROVIDER_SRC, is_new=True) + for file in self.new_class_paths(): + operators_paths = self.get_classes_from_file(file, NEW_PROVIDER_SRC) classes.update(operators_paths) return classes - def get_classes_from_file( - self, filepath: pathlib.Path, src_folder: pathlib.Path = PROVIDERS_SRC, is_new: bool = False - ): + def get_classes_from_file(self, filepath: pathlib.Path, src_folder: pathlib.Path): with open(filepath) as py_file: content = py_file.read() doc_node = ast.parse(content, filepath) @@ -262,12 +318,12 @@ def get_classes_from_file( ): if "provider_tests" in module: continue - - if is_new: - module_path = module[module.find("airflow.providers") :] - results[f"{module_path}.{current_node.name}"] = current_node - else: - results[f"{module}.{current_node.name}"] = current_node + if "integration" in module: + continue + if "system" in module: + continue + module_path = module[module.find("airflow.providers") :] + results[f"{module_path}.{current_node.name}"] = current_node print(f"{results}") return results @@ -286,24 +342,13 @@ class ExampleCoverageTest(ProjectStructureTest): def example_paths(self): """Override this method if your example dags are located elsewhere""" - # old_design: - yield from glob.glob( - f"{ROOT_FOLDER}/providers/src/airflow/providers/{self.PROVIDER}/**/example_dags/example_*.py", - recursive=True, - ) - # new_design: - yield from glob.glob( - f"{ROOT_FOLDER}/providers/tests/system/{self.PROVIDER}/**/example_*.py", recursive=True - ) - # new_design v2: - # TODO remove #new_design when movement is finished yield from glob.glob( - f"{ROOT_FOLDER}/providers/{self.PROVIDER}/tests/system/{self.PROVIDER}/**/example_*.py", + f"{AIRFLOW_SOURCES_ROOT}/providers/{self.PROVIDER}/tests/system/{self.PROVIDER}/**/example_*.py", recursive=True, ) yield from glob.glob( - f"{ROOT_FOLDER}/providers/{self.PROVIDER}/src/airflow/providers/{self.PROVIDER}/**/example_*.py", + f"{AIRFLOW_SOURCES_ROOT}/providers/{self.PROVIDER}/src/airflow/providers/{self.PROVIDER}/**/example_*.py", recursive=True, ) @@ -321,6 +366,7 @@ def test_missing_examples(self): classes -= self.MISSING_EXAMPLES_FOR_CLASSES classes -= self.DEPRECATED_CLASSES classes -= self.BASE_CLASSES + classes = set(class_name for class_name in classes if not class_name.startswith("Test")) if set() != classes: print("Classes with missing examples:") print_sorted(classes) @@ 
-597,7 +643,7 @@ class TestOperatorsHooks: def test_no_illegal_suffixes(self): illegal_suffixes = ["_operator.py", "_hook.py", "_sensor.py"] files = itertools.chain.from_iterable( - glob.glob(f"{ROOT_FOLDER}/{part}/providers/**/{resource_type}/*.py", recursive=True) + glob.glob(f"{AIRFLOW_SOURCES_ROOT}/{part}/providers/**/{resource_type}/*.py", recursive=True) for resource_type in ["operators", "hooks", "sensors", "example_dags"] for part in ["airflow", "tests"] ) diff --git a/tests/conftest.py b/tests/conftest.py index d1dbd6a3a4b77..28c359b81754c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -44,14 +44,6 @@ ] -@pytest.hookimpl(tryfirst=True) -def pytest_configure(config: pytest.Config) -> None: - dep_path = [config.rootpath.joinpath("tests", "deprecations_ignore.yml")] - config.inicfg["airflow_deprecations_ignore"] = ( - config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] - ) - - @pytest.fixture def reset_environment(): """Resets env variables.""" diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index a2acc21526127..4749b6f854991 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -328,9 +328,7 @@ def get_excluded_patterns() -> Generator[str, None, None]: for provider, provider_info in all_providers.items(): if python_version in provider_info.get("excluded-python-versions"): provider_path = provider.replace(".", "/") - yield f"airflow/providers/{provider_path}/" - yield f"providers/tests/{provider_path}/" - yield f"providers/tests/system/{provider_path}/" + yield f"providers/{provider_path}" def collect_dags(dag_folder=None): @@ -343,13 +341,10 @@ def collect_dags(dag_folder=None): if dag_folder is None: patterns = [ "airflow/example_dags", - "providers/src/airflow/providers/*/example_dags", # TODO: Remove once AIP-47 is completed - "providers/src/airflow/providers/*/*/example_dags", # TODO: Remove once AIP-47 is completed # For now include amazon directly because they have many dags and are all serializing without error "providers/amazon/tests/system/*/*/", - # TODO: Remove once all providers are migrated - "providers/tests/system/*/", - "providers/tests/system/*/*/", + "providers/*/tests/system/*/", + "providers/*/*/tests/system/*/*/", ] else: if isinstance(dag_folder, (list, tuple)): @@ -587,8 +582,6 @@ def test_deserialization_across_process(self): def test_roundtrip_provider_example_dags(self): dags, _ = collect_dags( [ - "providers/src/airflow/providers/*/example_dags", - "providers/src/airflow/providers/*/*/example_dags", "providers/*/src/airflow/providers/*/example_dags", "providers/*/src/airflow/providers/*/*/example_dags", ] diff --git a/tests/www/views/conftest.py b/tests/www/views/conftest.py index 68516b4f4c865..ea5db14df11db 100644 --- a/tests/www/views/conftest.py +++ b/tests/www/views/conftest.py @@ -18,11 +18,7 @@ from __future__ import annotations import os -from collections.abc import Generator -from contextlib import contextmanager -from typing import Any, NamedTuple -import flask import jinja2 import pytest @@ -140,79 +136,3 @@ def anonymous_client(app): @pytest.fixture def anonymous_client_as_admin(app): return client_without_login_as_admin(app) - - -class _TemplateWithContext(NamedTuple): - template: jinja2.environment.Template - context: dict[str, Any] - - @property - def name(self): - return self.template.name - - @property - def local_context(self): - """Returns context without global 
arguments""" - result = self.context.copy() - keys_to_delete = [ - # flask.templating._default_template_ctx_processor - "g", - "request", - "session", - # flask_wtf.csrf.CSRFProtect.init_app - "csrf_token", - # flask_login.utils._user_context_processor - "current_user", - # flask_appbuilder.baseviews.BaseView.render_template - "appbuilder", - "base_template", - # airflow.www.app.py.create_app (inner method - jinja_globals) - "server_timezone", - "default_ui_timezone", - "hostname", - "navbar_color", - "navbar_text_color", - "navbar_hover_color", - "navbar_text_hover_color", - "navbar_logo_text_color", - "log_fetch_delay_sec", - "log_auto_tailing_offset", - "log_animation_speed", - "state_color_mapping", - "airflow_version", - "git_version", - "k8s_or_k8scelery_executor", - # airflow.www.static_config.configure_manifest_files - "url_for_asset", - # airflow.www.views.AirflowBaseView.render_template - "scheduler_job", - # airflow.www.views.AirflowBaseView.extra_args - "macros", - "auth_manager", - "triggerer_job", - ] - for key in keys_to_delete: - if key in result: - del result[key] - - return result - - -@pytest.fixture(scope="module") -def capture_templates(app): - @contextmanager - def manager() -> Generator[list[_TemplateWithContext], None, None]: - recorded = [] - - def record(sender, template, context, **extra): - recorded.append(_TemplateWithContext(template, context)) - - flask.template_rendered.connect(record, app) # type: ignore - try: - yield recorded - finally: - flask.template_rendered.disconnect(record, app) # type: ignore - - assert recorded, "Failed to catch the templates" - - return manager diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index 0be57b523f96c..871b7960200be 100644 --- a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -43,7 +43,11 @@ from tests_common.test_utils.config import conf_vars from tests_common.test_utils.mock_plugins import mock_plugin_manager -from tests_common.test_utils.www import check_content_in_response, check_content_not_in_response +from tests_common.test_utils.www import ( + capture_templates, # noqa: F401 + check_content_in_response, + check_content_not_in_response, +) pytestmark = pytest.mark.db_test @@ -85,7 +89,10 @@ def test_webserver_configuration_config_file(mock_webserver_config_global, admin assert os.path.isfile(config_file) -def test_redoc_should_render_template(capture_templates, admin_client): +def test_redoc_should_render_template( + capture_templates, # noqa: F811 + admin_client, +): from airflow.utils.docs import get_docs_url with capture_templates() as templates: diff --git a/tests/www/views/test_views_dagrun.py b/tests/www/views/test_views_dagrun.py index d258445041d48..dfd46340a7a92 100644 --- a/tests/www/views/test_views_dagrun.py +++ b/tests/www/views/test_views_dagrun.py @@ -20,80 +20,18 @@ import pytest from airflow.models import DagBag, DagRun, TaskInstance -from airflow.security import permissions from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.types import DagRunTriggeredByType, DagRunType -from airflow.www.views import DagRunModelView -from providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( - create_user, - delete_roles, - delete_user, -) -from tests.www.views.test_views_tasks import _get_appbuilder_pk_string from tests_common.test_utils.www import ( check_content_in_response, check_content_not_in_response, - client_with_login, ) pytestmark = pytest.mark.db_test 
-@pytest.fixture(scope="module") -def client_dr_without_dag_edit(app): - create_user( - app, - username="all_dr_permissions_except_dag_edit", - role_name="all_dr_permissions_except_dag_edit", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DAG_RUN), - ], - ) - - yield client_with_login( - app, - username="all_dr_permissions_except_dag_edit", - password="all_dr_permissions_except_dag_edit", - ) - - delete_user(app, username="all_dr_permissions_except_dag_edit") # type: ignore - delete_roles(app) - - -@pytest.fixture(scope="module") -def client_dr_without_dag_run_create(app): - create_user( - app, - username="all_dr_permissions_except_dag_run_create", - role_name="all_dr_permissions_except_dag_run_create", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DAG_RUN), - ], - ) - - yield client_with_login( - app, - username="all_dr_permissions_except_dag_run_create", - password="all_dr_permissions_except_dag_run_create", - ) - - delete_user(app, username="all_dr_permissions_except_dag_run_create") # type: ignore - delete_roles(app) - - @pytest.fixture(scope="module", autouse=True) def _init_blank_dagrun(): """Make sure there are no runs before we test anything. 
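The permission-scoped client fixtures removed above all follow a single pattern: create a user with a deliberately trimmed permission set, yield a logged-in client, then delete the user and its roles. A minimal sketch of that pattern, reusing the `api_connexion_utils` helpers with the signatures visible in the deleted code (the fixture, user, and role names are illustrative):

    import pytest

    from airflow.security import permissions
    from providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
        create_user,
        delete_roles,
        delete_user,
    )
    from tests_common.test_utils.www import client_with_login

    @pytest.fixture(scope="module")
    def client_dr_read_only(app):
        # Role that can view DAGs and DAG runs but cannot edit or create them
        create_user(
            app,
            username="dr_read_only",
            role_name="dr_read_only",
            permissions=[
                (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE),
                (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
                (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
            ],
        )
        yield client_with_login(app, username="dr_read_only", password="dr_read_only")
        # Clean up the user and any roles created for this test module
        delete_user(app, username="dr_read_only")
        delete_roles(app)
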
@@ -113,26 +51,6 @@ def _reset_dagrun(): session.query(TaskInstance).delete() -def test_get_dagrun_can_view_dags_without_edit_perms(session, running_dag_run, client_dr_without_dag_edit): - """Test that a user without dag_edit but with dag_read permission can view the records""" - assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 1 - resp = client_dr_without_dag_edit.get("/dagrun/list/", follow_redirects=True) - check_content_in_response(running_dag_run.dag_id, resp) - - -def test_create_dagrun_permission_denied(session, client_dr_without_dag_run_create): - data = { - "state": "running", - "dag_id": "example_bash_operator", - "logical_date": "2018-07-06 05:06:03", - "run_id": "test_list_dagrun_includes_conf", - "conf": '{"include": "me"}', - } - - resp = client_dr_without_dag_run_create.post("/dagrun/add", data=data, follow_redirects=True) - check_content_in_response("Access is Denied", resp) - - @pytest.fixture def running_dag_run(session): dag = DagBag().get_dag("example_bash_operator") @@ -185,22 +103,6 @@ def completed_dag_run_with_missing_task(session): return dag, dr -def test_delete_dagrun(session, admin_client, running_dag_run): - composite_key = _get_appbuilder_pk_string(DagRunModelView, running_dag_run) - assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 1 - admin_client.post(f"/dagrun/delete/{composite_key}", follow_redirects=True) - assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 0 - - -def test_delete_dagrun_permission_denied(session, running_dag_run, client_dr_without_dag_edit): - composite_key = _get_appbuilder_pk_string(DagRunModelView, running_dag_run) - - assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 1 - resp = client_dr_without_dag_edit.post(f"/dagrun/delete/{composite_key}", follow_redirects=True) - check_content_in_response("Access is Denied", resp) - assert session.query(DagRun).filter(DagRun.dag_id == running_dag_run.dag_id).count() == 1 - - @pytest.mark.parametrize( "action, expected_ti_states, expected_message", [ @@ -281,21 +183,6 @@ def test_muldelete_dag_runs_action(session, admin_client, running_dag_run): assert session.query(DagRun).filter(DagRun.id == dag_run_id).count() == 0 -@pytest.mark.parametrize( - "action", - ["clear", "set_success", "set_failed", "set_running"], - ids=["clear", "success", "failed", "running"], -) -def test_set_dag_runs_action_permission_denied(client_dr_without_dag_edit, running_dag_run, action): - running_dag_id = running_dag_run.id - resp = client_dr_without_dag_edit.post( - "/dagrun/action_post", - data={"action": action, "rowid": [str(running_dag_id)]}, - follow_redirects=True, - ) - check_content_in_response("Access is Denied", resp) - - def test_dag_runs_queue_new_tasks_action(session, admin_client, completed_dag_run_with_missing_task): dag, dag_run = completed_dag_run_with_missing_task resp = admin_client.post( diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 58689e6925a65..a27def9e79a38 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -22,19 +22,15 @@ import markupsafe import pytest -from airflow.models.errors import ParseImportError -from airflow.security import permissions from airflow.utils.state import State from airflow.www.utils import UIAlert from airflow.www.views import FILTER_LASTRUN_COOKIE, FILTER_STATUS_COOKIE, FILTER_TAGS_COOKIE -from 
providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user from tests_common.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags -from tests_common.test_utils.permissions import _resource_name from tests_common.test_utils.www import ( + capture_templates, # noqa: F401 check_content_in_response, check_content_not_in_response, - client_with_login, ) pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag] @@ -53,7 +49,10 @@ def _setup(): clean_db() -def test_home(capture_templates, admin_client): +def test_home( + capture_templates, # noqa: F811 + admin_client, +): with capture_templates() as templates: resp = admin_client.get("home", follow_redirects=True) check_content_in_response("DAGs", resp) @@ -119,86 +118,6 @@ def test_home_status_filter_cookie(admin_client): assert flask.session[FILTER_LASTRUN_COOKIE] == "all_states" -@pytest.fixture(scope="module") -def user_no_importerror(app): - """Create User that cannot access Import Errors""" - return create_user( - app, - username="user_no_importerrors", - role_name="role_no_importerrors", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - ], - ) - - -@pytest.fixture -def client_no_importerror(app, user_no_importerror): - """Client for User that cannot access Import Errors""" - return client_with_login( - app, - username="user_no_importerrors", - password="user_no_importerrors", - ) - - -@pytest.fixture(scope="module") -def user_single_dag(app): - """Create User that can only access the first DAG from TEST_FILTER_DAG_IDS""" - return create_user( - app, - username="user_single_dag", - role_name="role_single_dag", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR), - ( - permissions.ACTION_CAN_READ, - _resource_name(TEST_FILTER_DAG_IDS[0], permissions.RESOURCE_DAG), - ), - ], - ) - - -@pytest.fixture -def client_single_dag(app, user_single_dag): - """Client for User that can only access the first DAG from TEST_FILTER_DAG_IDS""" - return client_with_login( - app, - username="user_single_dag", - password="user_single_dag", - ) - - -@pytest.fixture(scope="module") -def user_single_dag_edit(app): - """Create User that can edit DAG resource only a single DAG""" - return create_user( - app, - username="user_single_dag_edit", - role_name="role_single_dag", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - ( - permissions.ACTION_CAN_EDIT, - _resource_name("filter_test_1", permissions.RESOURCE_DAG), - ), - ], - ) - - -@pytest.fixture -def client_single_dag_edit(app, user_single_dag_edit): - """Client for User that can only edit the first DAG from TEST_FILTER_DAG_IDS""" - return client_with_login( - app, - username="user_single_dag_edit", - password="user_single_dag_edit", - ) - - TEST_FILTER_DAG_IDS = ["filter_test_1", "filter_test_2", "a_first_dag_id_asc", "filter.test"] TEST_TAGS = ["example", "test", "team", "group"] @@ -211,30 +130,6 @@ def _working_dags(dag_maker): pass -@pytest.fixture -def _working_dags_with_read_perm(dag_maker): - for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): - if dag_id == "filter_test_1": - access_control = {"role_single_dag": {"can_read"}} - else: - access_control = None - - with dag_maker(dag_id=dag_id, fileloc=f"/{dag_id}.py", tags=[tag], 
access_control=access_control): - pass - - -@pytest.fixture -def _working_dags_with_edit_perm(dag_maker): - for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): - if dag_id == "filter_test_1": - access_control = {"role_single_dag": {"can_edit"}} - else: - access_control = None - - with dag_maker(dag_id=dag_id, fileloc=f"/{dag_id}.py", tags=[tag], access_control=access_control): - pass - - @pytest.fixture def _broken_dags(session): from airflow.models.errors import ParseImportError @@ -248,21 +143,6 @@ def _broken_dags(session): session.commit() -@pytest.fixture -def _broken_dags_after_working(dag_maker, session): - # First create and process a DAG file that works - path = "/all_in_one.py" - for dag_id in TEST_FILTER_DAG_IDS: - with dag_maker(dag_id=dag_id, fileloc=path, session=session): - pass - - # Then create an import error against that file - session.add( - ParseImportError(filename=path, bundle_name="dag_maker", stacktrace="Some Error\nTraceback:\n") - ) - session.commit() - - def test_home_filter_tags(_working_dags, admin_client): with admin_client: admin_client.get("home?tags=example&tags=data", follow_redirects=True) @@ -281,49 +161,6 @@ def test_home_importerrors(_broken_dags, user_client): check_content_in_response(f"/{dag_id}.py", resp) -@pytest.mark.usefixtures("_broken_dags", "_working_dags") -def test_home_no_importerrors_perm(_broken_dags, client_no_importerror): - # Users without "can read on import errors" don't see any import errors - resp = client_no_importerror.get("home", follow_redirects=True) - check_content_not_in_response("Import Errors", resp) - - -@pytest.mark.parametrize( - "page", - [ - "home", - "home?status=all", - "home?status=active", - "home?status=paused", - "home?lastrun=running", - "home?lastrun=failed", - "home?lastrun=all_states", - ], -) -@pytest.mark.usefixtures("_working_dags_with_read_perm", "_broken_dags") -def test_home_importerrors_filtered_singledag_user(client_single_dag, page): - # Users that can only see certain DAGs get a filtered list of import errors - resp = client_single_dag.get(page, follow_redirects=True) - check_content_in_response("Import Errors", resp) - # They can see the first DAGs import error - check_content_in_response(f"/{TEST_FILTER_DAG_IDS[0]}.py", resp) - check_content_in_response("Traceback", resp) - # But not the rest - for dag_id in TEST_FILTER_DAG_IDS[1:]: - check_content_not_in_response(f"/{dag_id}.py", resp) - - -def test_home_importerrors_missing_read_on_all_dags_in_file(_broken_dags_after_working, client_single_dag): - # If a user doesn't have READ on all DAGs in a file, that files traceback is redacted - resp = client_single_dag.get("home", follow_redirects=True) - check_content_in_response("Import Errors", resp) - # They can see the DAG file has an import error - check_content_in_response("all_in_one.py", resp) - # And the traceback is redacted - check_content_not_in_response("Traceback", resp) - check_content_in_response("REDACTED", resp) - - def test_home_dag_list(_working_dags, user_client): # Users with "can read on DAGs" gets all DAGs resp = user_client.get("home", follow_redirects=True) @@ -331,16 +168,6 @@ def test_home_dag_list(_working_dags, user_client): check_content_in_response(f"dag_id={dag_id}", resp) -def test_home_dag_list_filtered_singledag_user(_working_dags_with_read_perm, client_single_dag): - # Users that can only see certain DAGs get a filtered list - resp = client_single_dag.get("home", follow_redirects=True) - # They can see the first DAG - 
check_content_in_response(f"dag_id={TEST_FILTER_DAG_IDS[0]}", resp) - # But not the rest - for dag_id in TEST_FILTER_DAG_IDS[1:]: - check_content_not_in_response(f"dag_id={dag_id}", resp) - - def test_home_dag_list_search(_working_dags, user_client): resp = user_client.get("home?search=filter_test", follow_redirects=True) check_content_in_response("dag_id=filter_test_1", resp) @@ -349,17 +176,6 @@ def test_home_dag_list_search(_working_dags, user_client): check_content_not_in_response("dag_id=a_first_dag_id_asc", resp) -def test_home_dag_edit_permissions(capture_templates, _working_dags_with_edit_perm, client_single_dag_edit): - with capture_templates() as templates: - client_single_dag_edit.get("home", follow_redirects=True) - - dags = templates[0].local_context["dags"] - assert len(dags) > 0 - dag_edit_perm_tuple = [(dag.dag_id, dag.can_edit) for dag in dags] - assert ("filter_test_1", True) in dag_edit_perm_tuple - assert ("filter_test_2", False) in dag_edit_perm_tuple - - def test_home_robots_header_in_response(user_client): # Responses should include X-Robots-Tag header resp = user_client.get("home", follow_redirects=True) diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index 04160a83bf6f3..3ed967d783276 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -34,26 +34,20 @@ from airflow.models.xcom import XCom from airflow.providers.celery.executors.celery_executor import CeleryExecutor from airflow.providers.standard.operators.empty import EmptyOperator -from airflow.security import permissions from airflow.utils import timezone from airflow.utils.log.logging_mixin import ExternalLoggingMixin from airflow.utils.session import create_session from airflow.utils.state import DagRunState, State from airflow.utils.types import DagRunTriggeredByType, DagRunType from airflow.www.views import TaskInstanceModelView, _safe_parse_datetime -from providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( - create_user, - delete_roles, - delete_user, -) from tests_common.test_utils.compat import BashOperator from tests_common.test_utils.config import conf_vars from tests_common.test_utils.db import clear_db_runs, clear_db_xcom from tests_common.test_utils.www import ( + capture_templates, # noqa: F401 check_content_in_response, check_content_not_in_response, - client_with_login, ) pytestmark = pytest.mark.db_test @@ -137,34 +131,6 @@ def _init_dagruns(app): clear_db_xcom() -@pytest.fixture(scope="module") -def client_ti_without_dag_edit(app): - create_user( - app, - username="all_ti_permissions_except_dag_edit", - role_name="all_ti_permissions_except_dag_edit", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_TASK_INSTANCE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), - (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE), - (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE), - (permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_TASK_INSTANCE), - (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN), - ], - ) - - yield client_with_login( - app, - username="all_ti_permissions_except_dag_edit", - password="all_ti_permissions_except_dag_edit", - ) - - delete_user(app, username="all_ti_permissions_except_dag_edit") # type: ignore - delete_roles(app) - - @pytest.mark.parametrize( 
"url, contents", [ @@ -454,23 +420,6 @@ def test_gantt_trigger_origin_grid_view(app, admin_client): check_content_in_response(href, resp) -def test_graph_view_without_dag_permission(app, one_dag_perm_user_client): - url = "/dags/example_bash_operator/graph" - resp = one_dag_perm_user_client.get(url, follow_redirects=True) - assert resp.status_code == 200 - assert ( - resp.request.url - == "http://localhost/dags/example_bash_operator/grid?tab=graph&dag_run_id=TEST_DAGRUN" - ) - check_content_in_response("example_bash_operator", resp) - - url = "/dags/example_xcom/graph" - resp = one_dag_perm_user_client.get(url, follow_redirects=True) - assert resp.status_code == 200 - assert resp.request.url == "http://localhost/home" - check_content_in_response("Access is Denied", resp) - - def test_last_dagruns(admin_client): resp = admin_client.post("last_dagruns", follow_redirects=True) check_content_in_response("example_bash_operator", resp) @@ -621,84 +570,17 @@ def new_dag_to_delete(testing_dag_bundle): return dag -@pytest.fixture -def per_dag_perm_user_client(app, new_dag_to_delete): - sm = app.appbuilder.sm - perm = f"{permissions.RESOURCE_DAG_PREFIX}{new_dag_to_delete.dag_id}" - - sm.create_permission(permissions.ACTION_CAN_DELETE, perm) - - create_user( - app, - username="test_user_per_dag_perms", - role_name="User with some perms", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - (permissions.ACTION_CAN_DELETE, perm), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - ], - ) - - sm.find_user(username="test_user_per_dag_perms") - - yield client_with_login( - app, - username="test_user_per_dag_perms", - password="test_user_per_dag_perms", - ) - - delete_user(app, username="test_user_per_dag_perms") # type: ignore - delete_roles(app) - - -@pytest.fixture -def one_dag_perm_user_client(app): - username = "test_user_one_dag_perm" - dag_id = "example_bash_operator" - sm = app.appbuilder.sm - perm = f"{permissions.RESOURCE_DAG_PREFIX}{dag_id}" - - sm.create_permission(permissions.ACTION_CAN_READ, perm) - - create_user( - app, - username=username, - role_name="User with permission to access only one dag", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, perm), - ], - ) - - sm.find_user(username=username) - - yield client_with_login( - app, - username=username, - password=username, - ) - - delete_user(app, username=username) # type: ignore - delete_roles(app) - - -def test_delete_just_dag_per_dag_permissions(new_dag_to_delete, per_dag_perm_user_client): - resp = per_dag_perm_user_client.post( - f"delete?dag_id={new_dag_to_delete.dag_id}&next=/home", follow_redirects=True - ) - check_content_in_response(f"Deleting DAG with id {new_dag_to_delete.dag_id}.", resp) - - def test_delete_just_dag_resource_permissions(new_dag_to_delete, user_client): resp = user_client.post(f"delete?dag_id={new_dag_to_delete.dag_id}&next=/home", follow_redirects=True) check_content_in_response(f"Deleting DAG with id {new_dag_to_delete.dag_id}.", resp) @pytest.mark.parametrize("endpoint", ["graph", "tree"]) -def test_show_external_log_redirect_link_with_local_log_handler(capture_templates, admin_client, endpoint): +def test_show_external_log_redirect_link_with_local_log_handler( + capture_templates, # noqa: F811 + admin_client, + 
endpoint,
+):
     """Do not show external links if log handler is local."""
     url = f"{endpoint}?dag_id=example_bash_operator"
     with capture_templates() as templates:
@@ -731,7 +613,10 @@ def supports_external_link(self) -> bool:
     return_value=_ExternalHandler(),
 )
 def test_show_external_log_redirect_link_with_external_log_handler(
-    _, capture_templates, admin_client, endpoint
+    _,
+    capture_templates,  # noqa: F811
+    admin_client,
+    endpoint,
 ):
     """Show external links if log handler is external."""
     url = f"{endpoint}?dag_id=example_bash_operator"
@@ -749,7 +634,10 @@ def test_show_external_log_redirect_link_with_external_log_handler(
     return_value=_ExternalHandler(),
 )
 def test_external_log_redirect_link_with_external_log_handler_not_shown(
-    _external_handler, capture_templates, admin_client, endpoint
+    _external_handler,
+    capture_templates,  # noqa: F811
+    admin_client,
+    endpoint,
 ):
-    """Show external links if log handler is external."""
+    """Do not show external link redirects when the external handler does not support them."""
     _external_handler.return_value._supports_external_link = False
@@ -794,23 +682,6 @@ def test_task_instance_delete(session, admin_client, create_task_instance):
     assert session.query(TaskInstance).filter(TaskInstance.task_id == task_id).count() == 0


-def test_task_instance_delete_permission_denied(session, client_ti_without_dag_edit, create_task_instance):
-    task_instance_to_delete = create_task_instance(
-        task_id="test_task_instance_delete_permission_denied",
-        logical_date=timezone.utcnow(),
-        state=State.DEFERRED,
-        session=session,
-    )
-    session.commit()
-    composite_key = _get_appbuilder_pk_string(TaskInstanceModelView, task_instance_to_delete)
-    task_id = task_instance_to_delete.task_id
-
-    assert session.query(TaskInstance).filter(TaskInstance.task_id == task_id).count() == 1
-    resp = client_ti_without_dag_edit.post(f"/taskinstance/delete/{composite_key}", follow_redirects=True)
-    check_content_in_response("Access is Denied", resp)
-    assert session.query(TaskInstance).filter(TaskInstance.task_id == task_id).count() == 1
-
-
 @pytest.mark.parametrize(
     "client_fixture, should_succeed",
     [
diff --git a/tests/www/views/test_views_trigger_dag.py b/tests/www/views/test_views_trigger_dag.py
index e9de5d668f55b..b000038a1f6e0 100644
--- a/tests/www/views/test_views_trigger_dag.py
+++ b/tests/www/views/test_views_trigger_dag.py
@@ -27,14 +27,10 @@
 from airflow.models import DagBag, DagRun
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.sdk.definitions.param import Param
-from airflow.security import permissions
 from airflow.utils import timezone
 from airflow.utils.json import WebEncoder
 from airflow.utils.session import create_session
 from airflow.utils.types import DagRunType
-from providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
-    create_test_client,
-)
 from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.www import check_content_in_response
@@ -298,26 +294,6 @@ def test_trigger_endpoint_uses_existing_dagbag(admin_client):
     check_content_in_response("example_bash_operator", resp)


-def test_viewer_cant_trigger_dag(app):
-    """
-    Test that the test_viewer user can't trigger DAGs.
- """ - with create_test_client( - app, - user_name="test_user", - role_name="test_role", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), - (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN), - ], - ) as client: - url = "dags/example_bash_operator/trigger" - resp = client.get(url, follow_redirects=True) - response_data = resp.data.decode() - assert "Access is Denied" in response_data - - def test_trigger_dag_params_array_value_none_render(admin_client, dag_maker, session, app, monkeypatch): """ Test that textarea in Trigger DAG UI is pre-populated diff --git a/tests/www/views/test_views_variable.py b/tests/www/views/test_views_variable.py index c426632da2386..51d9ade5dcb1f 100644 --- a/tests/www/views/test_views_variable.py +++ b/tests/www/views/test_views_variable.py @@ -23,15 +23,12 @@ import pytest from airflow.models import Variable -from airflow.security import permissions from airflow.utils.session import create_session -from providers.fab.tests.provider_tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user from tests_common.test_utils.www import ( _check_last_log, check_content_in_response, check_content_not_in_response, - client_with_login, ) pytestmark = pytest.mark.db_test @@ -49,30 +46,6 @@ def _clear_variables(): session.query(Variable).delete() -@pytest.fixture(scope="module") -def user_variable_reader(app): - """Create User that can only read variables""" - return create_user( - app, - username="user_variable_reader", - role_name="role_variable_reader", - permissions=[ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE), - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - ], - ) - - -@pytest.fixture -def client_variable_reader(app, user_variable_reader): - """Client for User that can only access the first DAG from TEST_FILTER_DAG_IDS""" - return client_with_login( - app, - username="user_variable_reader", - password="user_variable_reader", - ) - - def test_can_handle_error_on_decrypt(session, admin_client): # create valid variable admin_client.post("/variable/add", data=VARIABLE, follow_redirects=True) @@ -209,11 +182,6 @@ def test_import_variables_form_shown(app, admin_client): check_content_in_response("Import Variables", resp) -def test_import_variables_form_hidden(app, client_variable_reader): - resp = client_variable_reader.get("/variable/list/") - check_content_not_in_response("Import Variables", resp) - - def test_description_retrieval(session, admin_client): # create valid variable admin_client.post("/variable/add", data=VARIABLE, follow_redirects=True) @@ -257,13 +225,3 @@ def test_action_muldelete(session, admin_client, variable): ) assert resp.status_code == 200 assert session.query(Variable).filter(Variable.id == var_id).count() == 0 - - -def test_action_muldelete_access_denied(session, client_variable_reader, variable): - var_id = variable.id - resp = client_variable_reader.post( - "/variable/action_post", - data={"action": "muldelete", "rowid": [var_id]}, - follow_redirects=True, - ) - check_content_in_response("Access is Denied", resp) diff --git a/tests_common/_internals/capture_warnings.py b/tests_common/_internals/capture_warnings.py index a8e719c24df69..dc7e0fb68e02e 100644 --- a/tests_common/_internals/capture_warnings.py +++ b/tests_common/_internals/capture_warnings.py @@ -120,12 +120,12 @@ def group(self) -> str: There is an assumption that airflow and all dependencies set it correct eventually. 
But we should not use it to filter it out, only for show in different groups. """ - if self.filename.startswith("airflow/"): + if "/tests/" in self.filename: + return "tests" + elif self.filename.startswith("airflow/"): return "airflow" - elif self.filename.startswith("providers/src/"): + elif self.filename.startswith("providers/"): return "providers" - elif self.filename.startswith("tests/") or self.filename.startswith("providers/tests/"): - return "tests" return "other" def dumps(self) -> str: diff --git a/tests_common/_internals/forbidden_warnings.py b/tests_common/_internals/forbidden_warnings.py index 1217927f11014..6e231160e70b9 100644 --- a/tests_common/_internals/forbidden_warnings.py +++ b/tests_common/_internals/forbidden_warnings.py @@ -40,12 +40,8 @@ def __init__(self, config: pytest.Config, forbidden_warnings: tuple[str, ...]): self.deprecations_ignore = deprecations_ignore excluded_cases = { - # Skip: Integration and System Tests "tests/integration/", "tests/system/", - "providers/tests/integration/", - "providers/tests/system/", - # Skip: DAGs for tests "tests/dags/", "tests/dags_corrupted/", "tests/dags_with_system_exit/", diff --git a/tests_common/pytest_plugin.py b/tests_common/pytest_plugin.py index a7bb4f6a0a68d..2ee6f6355cdf4 100644 --- a/tests_common/pytest_plugin.py +++ b/tests_common/pytest_plugin.py @@ -359,9 +359,13 @@ def initialize_airflow_tests(request): sys.exit(1) +def _find_all_deprecation_ignore_files() -> list[str]: + all_deprecation_ignore_files = AIRFLOW_SOURCES_ROOT_DIR.rglob("deprecations_ignore.yml") + return list(path.as_posix() for path in all_deprecation_ignore_files) + + def pytest_configure(config: pytest.Config) -> None: - # Ensure that the airflow sources dir is at the end of the sys path if it's not already there. Needed to - # run import from `providers/tests/` + # Ensure that the airflow sources dir is at the end of the sys path if it's not already there. 
if os.environ.get("USE_AIRFLOW_VERSION") == "": # if USE_AIRFLOW_VERSION is not empty, we are running tests against the installed version of Airflow # and providers so there is no need to add the sources directory to the path @@ -380,7 +384,7 @@ def pytest_configure(config: pytest.Config) -> None: f"expected one of: {', '.join(map(repr, SUPPORTED_DB_BACKENDS))}" ) pytest.exit(msg, returncode=6) - + config.inicfg["airflow_deprecations_ignore"] = _find_all_deprecation_ignore_files() config.addinivalue_line("markers", "integration(name): mark test to run with named integration") config.addinivalue_line("markers", "backend(name): mark test to run with named backend") config.addinivalue_line("markers", "system: mark test to run as system test") @@ -1712,3 +1716,15 @@ def create_db_api_hook(request): test_db_hook.escape_column_names = escape_column_names or False return test_db_hook + + +@pytest.fixture(autouse=True, scope="session") +def add_providers_test_folders_to_pythonpath(): + old_path = sys.path.copy() + all_provider_tests_folders: list[Path] = list(Path(__file__).parents[1].glob("providers/*/tests")) + all_provider_tests_folders.extend(list(Path(__file__).parents[1].glob("providers/*/*/tests"))) + for provider in all_provider_tests_folders: + sys.path.append(str(provider)) + yield + sys.path.clear() + sys.path.extend(old_path) diff --git a/tests_common/test_utils/gcp_system_helpers.py b/tests_common/test_utils/gcp_system_helpers.py index 3e5403cee0eec..e09eaeb8b03fb 100644 --- a/tests_common/test_utils/gcp_system_helpers.py +++ b/tests_common/test_utils/gcp_system_helpers.py @@ -27,14 +27,14 @@ import pytest from google.auth.environment_vars import CLOUD_SDK_CONFIG_DIR, CREDENTIALS - -import airflow.providers.google -from airflow.providers.google.cloud.utils.credentials_provider import provide_gcp_conn_and_credentials -from providers.google.tests.provider_tests.google.cloud.utils.gcp_authenticator import ( +from provider_tests.google.cloud.utils.gcp_authenticator import ( GCP_GCS_KEY, GCP_SECRET_MANAGER_KEY, ) +import airflow.providers.google +from airflow.providers.google.cloud.utils.credentials_provider import provide_gcp_conn_and_credentials + from tests_common.test_utils import AIRFLOW_MAIN_FOLDER from tests_common.test_utils.logging_command_executor import CommandExecutor from tests_common.test_utils.system_tests_class import SystemTest diff --git a/tests_common/test_utils/www.py b/tests_common/test_utils/www.py index 0e3fd43def88f..ccc888e3b57e6 100644 --- a/tests_common/test_utils/www.py +++ b/tests_common/test_utils/www.py @@ -18,10 +18,19 @@ import ast import json +from collections.abc import Generator +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, NamedTuple from unittest import mock +import flask +import pytest + from airflow.models import Log +if TYPE_CHECKING: + import jinja2 + def client_with_login(app, expected_response_code=302, **kwargs): patch_path = "airflow.providers.fab.auth_manager.security_manager.override.check_password_hash" @@ -148,3 +157,79 @@ def _check_last_log_masked_variable(session, dag_id, event, logical_date): assert len(logs) >= 1 extra_dict = ast.literal_eval(logs[0].extra) assert extra_dict == {"key": "x_secret", "val": "***"} + + +class _TemplateWithContext(NamedTuple): + template: jinja2.environment.Template + context: dict[str, Any] + + @property + def name(self): + return self.template.name + + @property + def local_context(self): + """Returns context without global arguments.""" + result = self.context.copy() + 
keys_to_delete = [ + # flask.templating._default_template_ctx_processor + "g", + "request", + "session", + # flask_wtf.csrf.CSRFProtect.init_app + "csrf_token", + # flask_login.utils._user_context_processor + "current_user", + # flask_appbuilder.baseviews.BaseView.render_template + "appbuilder", + "base_template", + # airflow.www.app.py.create_app (inner method - jinja_globals) + "server_timezone", + "default_ui_timezone", + "hostname", + "navbar_color", + "navbar_text_color", + "navbar_hover_color", + "navbar_text_hover_color", + "navbar_logo_text_color", + "log_fetch_delay_sec", + "log_auto_tailing_offset", + "log_animation_speed", + "state_color_mapping", + "airflow_version", + "git_version", + "k8s_or_k8scelery_executor", + # airflow.www.static_config.configure_manifest_files + "url_for_asset", + # airflow.www.views.AirflowBaseView.render_template + "scheduler_job", + # airflow.www.views.AirflowBaseView.extra_args + "macros", + "auth_manager", + "triggerer_job", + ] + for key in keys_to_delete: + if key in result: + del result[key] + + return result + + +@pytest.fixture(scope="module") +def capture_templates(app): + @contextmanager + def manager() -> Generator[list[_TemplateWithContext], None, None]: + recorded = [] + + def record(sender, template, context, **extra): + recorded.append(_TemplateWithContext(template, context)) + + flask.template_rendered.connect(record, app) # type: ignore + try: + yield recorded + finally: + flask.template_rendered.disconnect(record, app) # type: ignore + + assert recorded, "Failed to catch the templates" + + return manager
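Usage note (not part of the patch): with `capture_templates` now exported from
`tests_common.test_utils.www` as a module-scoped fixture, consuming test modules
import it and re-declare it as a parameter, silencing the resulting flake8
warnings, as in the `test_home` change above. A minimal sketch of that pattern,
assuming a logged-in `admin_client` fixture and a `dags` context key in the home
view; the test name and asserted key are illustrative, not part of this change:

    import pytest

    from tests_common.test_utils.www import (
        capture_templates,  # noqa: F401 - import makes the fixture resolvable here
        check_content_in_response,
    )

    pytestmark = pytest.mark.db_test


    def test_home_records_rendered_templates(
        capture_templates,  # noqa: F811 - intentionally shadows the import
        admin_client,  # assumed: logged-in admin test client fixture
    ):
        # Templates rendered while the block is active are recorded together
        # with their Jinja contexts as _TemplateWithContext tuples.
        with capture_templates() as templates:
            resp = admin_client.get("home", follow_redirects=True)
            check_content_in_response("DAGs", resp)

        assert templates, "home view should render at least one template"
        # local_context strips the Flask/appbuilder globals listed above,
        # leaving only the view-supplied data.
        assert "dags" in templates[0].local_context  # assumed context key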