diff --git a/airflow/example_dags/example_dynamic_task_mapping.py b/airflow/example_dags/example_dynamic_task_mapping.py
index 21f5a03ae8af1..03a77b0018508 100644
--- a/airflow/example_dags/example_dynamic_task_mapping.py
+++ b/airflow/example_dags/example_dynamic_task_mapping.py
@@ -24,7 +24,7 @@
 from airflow.decorators import task
 from airflow.models.dag import DAG
 
-with DAG(dag_id="example_dynamic_task_mapping", start_date=datetime(2022, 3, 4)) as dag:
+with DAG(dag_id="example_dynamic_task_mapping", schedule=None, start_date=datetime(2022, 3, 4)) as dag:
 
     @task
     def add_one(x: int):
diff --git a/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py b/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py
index d639f345aa618..3d42ac47b5654 100644
--- a/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py
+++ b/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py
@@ -53,6 +53,7 @@ def execute(self, context):
 
 with DAG(
     dag_id="example_dynamic_task_mapping_with_no_taskflow_operators",
+    schedule=None,
     start_date=datetime(2022, 3, 4),
     catchup=False,
 ):
diff --git a/airflow/example_dags/example_setup_teardown.py b/airflow/example_dags/example_setup_teardown.py
index dd61fcdc0197e..9fab87df7568b 100644
--- a/airflow/example_dags/example_setup_teardown.py
+++ b/airflow/example_dags/example_setup_teardown.py
@@ -27,6 +27,7 @@
 
 with DAG(
     dag_id="example_setup_teardown",
+    schedule=None,
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
diff --git a/airflow/example_dags/example_setup_teardown_taskflow.py b/airflow/example_dags/example_setup_teardown_taskflow.py
index 21c05e29c04a8..6fec9f9a47871 100644
--- a/airflow/example_dags/example_setup_teardown_taskflow.py
+++ b/airflow/example_dags/example_setup_teardown_taskflow.py
@@ -26,6 +26,7 @@
 
 with DAG(
     dag_id="example_setup_teardown_taskflow",
+    schedule=None,
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
diff --git a/airflow/example_dags/example_short_circuit_decorator.py b/airflow/example_dags/example_short_circuit_decorator.py
index 00d6cd7186751..2d82eeed069b6 100644
--- a/airflow/example_dags/example_short_circuit_decorator.py
+++ b/airflow/example_dags/example_short_circuit_decorator.py
@@ -26,7 +26,7 @@
 from airflow.utils.trigger_rule import TriggerRule
 
 
-@dag(start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, tags=["example"])
+@dag(schedule=None, start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, tags=["example"])
 def example_short_circuit_decorator():
     # [START howto_operator_short_circuit]
     @task.short_circuit()
diff --git a/airflow/example_dags/example_short_circuit_operator.py b/airflow/example_dags/example_short_circuit_operator.py
index 9dfee64707243..3941ff17f95a1 100644
--- a/airflow/example_dags/example_short_circuit_operator.py
+++ b/airflow/example_dags/example_short_circuit_operator.py
@@ -29,6 +29,7 @@
 
 with DAG(
     dag_id="example_short_circuit_operator",
+    schedule=None,
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
diff --git a/airflow/example_dags/example_skip_dag.py b/airflow/example_dags/example_skip_dag.py
index 72ff242831aa4..2655394c6f6f4 100644
--- a/airflow/example_dags/example_skip_dag.py
+++ b/airflow/example_dags/example_skip_dag.py
@@ -19,6 +19,7 @@
 
 from __future__ import annotations
 
+import datetime
 from typing import TYPE_CHECKING
 
 import pendulum
@@ -63,6 +64,7 @@ def create_test_pipeline(suffix, trigger_rule):
 
 with DAG(
     dag_id="example_skip_dag",
+    schedule=datetime.timedelta(days=1),
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
diff --git a/airflow/example_dags/example_task_group.py b/airflow/example_dags/example_task_group.py
index 85a6f114ee372..6435a912cc419 100644
--- a/airflow/example_dags/example_task_group.py
+++ b/airflow/example_dags/example_task_group.py
@@ -29,6 +29,7 @@
 # [START howto_task_group]
 with DAG(
     dag_id="example_task_group",
+    schedule=None,
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
diff --git a/airflow/example_dags/example_task_group_decorator.py b/airflow/example_dags/example_task_group_decorator.py
index 56d4decf63a81..ce4a0e33b8c24 100644
--- a/airflow/example_dags/example_task_group_decorator.py
+++ b/airflow/example_dags/example_task_group_decorator.py
@@ -67,6 +67,7 @@ def task_group_function(value: int) -> None:
 # Executing Tasks and TaskGroups
 with DAG(
     dag_id="example_task_group_decorator",
+    schedule=None,
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 50e2222bd0185..518b367067ef7 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -680,6 +680,12 @@ def __init__(
             self.timetable = DatasetTriggeredTimetable(DatasetAll(*schedule))
             self.schedule_interval = self.timetable.summary
         elif isinstance(schedule, ArgNotSet):
+            warnings.warn(
+                "Creating a DAG with an implicit schedule is deprecated, and will stop working "
+                "in a future release. Set `schedule=datetime.timedelta(days=1)` explicitly.",
+                RemovedInAirflow3Warning,
+                stacklevel=2,
+            )
             self.timetable = create_timetable(schedule, self.timezone)
             self.schedule_interval = DEFAULT_SCHEDULE_INTERVAL
         else:
@@ -3282,7 +3288,7 @@ def get_serialized_fields(cls):
             "auto_register",
             "fail_stop",
         }
-        cls.__serialized_fields = frozenset(vars(DAG(dag_id="test"))) - exclusion_list
+        cls.__serialized_fields = frozenset(vars(DAG(dag_id="test", schedule=None))) - exclusion_list
         return cls.__serialized_fields
 
     def get_edge_info(self, upstream_task_id: str, downstream_task_id: str) -> EdgeInfoType:
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index c36eecf5081af..4004b83a991bd 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -1670,7 +1670,7 @@ def serialize_dag(cls, dag: DAG) -> dict:
     @classmethod
     def deserialize_dag(cls, encoded_dag: dict[str, Any]) -> SerializedDAG:
         """Deserializes a DAG from a JSON object."""
-        dag = SerializedDAG(dag_id=encoded_dag["_dag_id"])
+        dag = SerializedDAG(dag_id=encoded_dag["_dag_id"], schedule=None)
 
         for k, v in encoded_dag.items():
             if k == "_downstream_task_ids":
diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py
index 2d10cdac6fef4..87fa668daa4fc 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator.py
@@ -50,7 +50,7 @@
 
 
 def create_context(task) -> Context:
-    dag = DAG(dag_id="dag")
+    dag = DAG(dag_id="dag", schedule=None)
     execution_date = timezone.datetime(
         2016, 1, 1, 1, 0, 0, tzinfo=timezone.parse_timezone("Europe/Amsterdam")
     )
diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py
index d3192723a7077..80739a30a5269 100644
--- a/tests/api_connexion/endpoints/test_dag_endpoint.py
+++ b/tests/api_connexion/endpoints/test_dag_endpoint.py
@@ -86,6 +86,7 @@ def configured_app(minimal_app_for_api):
 
     with DAG(
         DAG_ID,
+        schedule=None,
         start_date=datetime(2020, 6, 15),
         doc_md="details",
         params={"foo": 1},
@@ -93,10 +94,10 @@ def configured_app(minimal_app_for_api):
     ) as dag:
         EmptyOperator(task_id=TASK_ID)
 
-    with DAG(DAG2_ID, start_date=datetime(2020, 6, 15)) as dag2:  # no doc_md
+    with DAG(DAG2_ID, schedule=None, start_date=datetime(2020, 6, 15)) as dag2:  # no doc_md
         EmptyOperator(task_id=TASK_ID)
 
-    with DAG(DAG3_ID) as dag3:  # DAG start_date set to None
+    with DAG(DAG3_ID, schedule=None) as dag3:  # DAG start_date set to None
         EmptyOperator(task_id=TASK_ID, start_date=datetime(2019, 6, 12))
 
     dag_bag = DagBag(os.devnull, include_examples=False)
@@ -962,10 +963,10 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer):
     )
     def test_filter_dags_by_tags_works(self, url, expected_dag_ids):
         # test filter by tags
-        dag1 = DAG(dag_id="TEST_DAG_1", tags=["t1"])
-        dag2 = DAG(dag_id="TEST_DAG_2", tags=["t2"])
-        dag3 = DAG(dag_id="TEST_DAG_3", tags=["t1", "t2"])
-        dag4 = DAG(dag_id="TEST_DAG_4")
+        dag1 = DAG(dag_id="TEST_DAG_1", schedule=None, tags=["t1"])
+        dag2 = DAG(dag_id="TEST_DAG_2", schedule=None, tags=["t2"])
+        dag3 = DAG(dag_id="TEST_DAG_3", schedule=None, tags=["t1", "t2"])
+        dag4 = DAG(dag_id="TEST_DAG_4", schedule=None)
         dag1.sync_to_db()
         dag2.sync_to_db()
         dag3.sync_to_db()
@@ -990,10 +991,10 @@ def test_filter_dags_by_tags_works(self, url, expected_dag_ids):
     )
     def test_filter_dags_by_dag_id_works(self, url, expected_dag_ids):
         # test filter by tags
-        dag1 = DAG(dag_id="TEST_DAG_1")
-        dag2 = DAG(dag_id="TEST_DAG_2")
-        dag3 = DAG(dag_id="SAMPLE_DAG_1")
-        dag4 = DAG(dag_id="SAMPLE_DAG_2")
+        dag1 = DAG(dag_id="TEST_DAG_1", schedule=None)
+        dag2 = DAG(dag_id="TEST_DAG_2", schedule=None)
+        dag3 = DAG(dag_id="SAMPLE_DAG_1", schedule=None)
+        dag4 = DAG(dag_id="SAMPLE_DAG_2", schedule=None)
         dag1.sync_to_db()
         dag2.sync_to_db()
         dag3.sync_to_db()
@@ -1886,10 +1887,10 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer, session):
     )
     def test_filter_dags_by_tags_works(self, url, expected_dag_ids):
         # test filter by tags
-        dag1 = DAG(dag_id="TEST_DAG_1", tags=["t1"])
-        dag2 = DAG(dag_id="TEST_DAG_2", tags=["t2"])
-        dag3 = DAG(dag_id="TEST_DAG_3", tags=["t1", "t2"])
-        dag4 = DAG(dag_id="TEST_DAG_4")
+        dag1 = DAG(dag_id="TEST_DAG_1", schedule=None, tags=["t1"])
+        dag2 = DAG(dag_id="TEST_DAG_2", schedule=None, tags=["t2"])
+        dag3 = DAG(dag_id="TEST_DAG_3", schedule=None, tags=["t1", "t2"])
+        dag4 = DAG(dag_id="TEST_DAG_4", schedule=None)
         dag1.sync_to_db()
         dag2.sync_to_db()
         dag3.sync_to_db()
@@ -1919,10 +1920,10 @@ def test_filter_dags_by_tags_works(self, url, expected_dag_ids):
     )
     def test_filter_dags_by_dag_id_works(self, url, expected_dag_ids):
         # test filter by tags
-        dag1 = DAG(dag_id="TEST_DAG_1")
-        dag2 = DAG(dag_id="TEST_DAG_2")
-        dag3 = DAG(dag_id="SAMPLE_DAG_1")
-        dag4 = DAG(dag_id="SAMPLE_DAG_2")
+        dag1 = DAG(dag_id="TEST_DAG_1", schedule=None)
+        dag2 = DAG(dag_id="TEST_DAG_2", schedule=None)
+        dag3 = DAG(dag_id="SAMPLE_DAG_1", schedule=None)
+        dag4 = DAG(dag_id="SAMPLE_DAG_2", schedule=None)
         dag1.sync_to_db()
         dag2.sync_to_db()
         dag3.sync_to_db()
diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
index 098b3108dcdf5..76b2a09603609 100644
--- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py
+++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
@@ -95,7 +95,7 @@ def teardown_method(self) -> None:
         clear_db_xcom()
 
     def _create_dag(self):
-        with DAG(dag_id="TEST_DAG_ID", default_args={"start_date": self.default_time}) as dag:
+        with DAG(dag_id="TEST_DAG_ID", schedule=None, default_args={"start_date": self.default_time}) as dag:
             CustomOperator(task_id="TEST_SINGLE_LINK", bash_command="TEST_LINK_VALUE")
             CustomOperator(
                 task_id="TEST_MULTIPLE_LINK", bash_command=["TEST_LINK_VALUE_1", "TEST_LINK_VALUE_2"]
diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py
index 7c4fb613e4843..420d2dd65f89c 100644
--- a/tests/api_connexion/endpoints/test_log_endpoint.py
+++ b/tests/api_connexion/endpoints/test_log_endpoint.py
@@ -284,7 +284,7 @@ def test_get_logs_of_removed_task(self, request_url, expected_filename, extra_qu
 
         # Recreate DAG without tasks
         dagbag = self.app.dag_bag
-        dag = DAG(self.DAG_ID, start_date=timezone.parse(self.default_time))
+        dag = DAG(self.DAG_ID, schedule=None, start_date=timezone.parse(self.default_time))
         del dagbag.dags[self.DAG_ID]
         dagbag.bag_dag(dag=dag)
 
diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py
index cd3f323a504d0..d0a4fb903c8b8 100644
--- a/tests/api_connexion/endpoints/test_task_endpoint.py
+++ b/tests/api_connexion/endpoints/test_task_endpoint.py
@@ -70,11 +70,11 @@ class TestTaskEndpoint:
 
     @pytest.fixture(scope="class")
     def setup_dag(self, configured_app):
-        with DAG(self.dag_id, start_date=self.task1_start_date, doc_md="details") as dag:
+        with DAG(self.dag_id, schedule=None, start_date=self.task1_start_date, doc_md="details") as dag:
             task1 = EmptyOperator(task_id=self.task_id, params={"foo": "bar"})
             task2 = EmptyOperator(task_id=self.task_id2, start_date=self.task2_start_date)
 
-        with DAG(self.mapped_dag_id, start_date=self.task1_start_date) as mapped_dag:
+        with DAG(self.mapped_dag_id, schedule=None, start_date=self.task1_start_date) as mapped_dag:
             EmptyOperator(task_id=self.task_id3)
             # Use the private _expand() method to avoid the empty kwargs check.
             # We don't care about how the operator runs here, only its presence.
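To make the effect of the `airflow/models/dag.py` hunk above concrete, here is a minimal sketch, not part of the diff (the dag_ids are illustrative): omitting `schedule` now emits `RemovedInAirflow3Warning`, while any explicit value, including `None`, stays silent.

import warnings
from datetime import datetime, timedelta

from airflow.models.dag import DAG

# Implicit schedule: now warns (RemovedInAirflow3Warning is a DeprecationWarning subclass).
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    DAG(dag_id="implicit_schedule_example", start_date=datetime(2021, 1, 1))
assert any("implicit schedule" in str(w.message) for w in caught)

# Explicit schedule (None, a timedelta, or a cron string): no warning.
DAG(dag_id="explicit_schedule_example", schedule=timedelta(days=1), start_date=datetime(2021, 1, 1))

This is also why `get_serialized_fields` and `deserialize_dag` above now pass `schedule=None`: both construct throwaway DAG objects internally and would otherwise trip the new warning.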
diff --git a/tests/api_connexion/schemas/test_dag_schema.py b/tests/api_connexion/schemas/test_dag_schema.py
index ae3a87db151e8..3b610fc3e3f53 100644
--- a/tests/api_connexion/schemas/test_dag_schema.py
+++ b/tests/api_connexion/schemas/test_dag_schema.py
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import datetime
+from datetime import datetime, timedelta
 
 import pendulum
 import pytest
@@ -150,6 +150,7 @@ def test_serialize_test_dag_collection_schema(url_safe_serializer):
 def test_serialize_test_dag_detail_schema(url_safe_serializer):
     dag = DAG(
         dag_id="test_dag",
+        schedule=timedelta(days=1),
         start_date=datetime(2020, 6, 19),
         doc_md="docs",
         orientation="LR",
diff --git a/tests/api_experimental/common/test_delete_dag.py b/tests/api_experimental/common/test_delete_dag.py
index e7c249ee02ac5..1f6e656e0503e 100644
--- a/tests/api_experimental/common/test_delete_dag.py
+++ b/tests/api_experimental/common/test_delete_dag.py
@@ -70,7 +70,11 @@ class TestDeleteDAGSuccessfulDelete:
     def setup_dag_models(self):
         task = EmptyOperator(
             task_id="dummy",
-            dag=DAG(dag_id=self.key, default_args={"start_date": timezone.datetime(2022, 1, 1)}),
+            dag=DAG(
+                dag_id=self.key,
+                schedule=None,
+                default_args={"start_date": timezone.datetime(2022, 1, 1)},
+            ),
             owner="airflow",
         )
 
diff --git a/tests/api_experimental/common/test_trigger_dag.py b/tests/api_experimental/common/test_trigger_dag.py
index e65a3dad6ea14..9a897b5cddf4f 100644
--- a/tests/api_experimental/common/test_trigger_dag.py
+++ b/tests/api_experimental/common/test_trigger_dag.py
@@ -48,7 +48,7 @@ def test_trigger_dag_dag_not_found(self, dag_bag_mock):
     @mock.patch("airflow.models.DagBag")
     def test_trigger_dag_dag_run_exist(self, dag_bag_mock, dag_run_mock):
         dag_id = "dag_run_exist"
-        dag = DAG(dag_id)
+        dag = DAG(dag_id, schedule=None)
         dag_bag_mock.dags = [dag_id]
         dag_bag_mock.get_dag.return_value = dag
         dag_run_mock.find_duplicate.return_value = DagRun()
@@ -58,7 +58,11 @@ def test_trigger_dag_dag_run_exist(self, dag_bag_mock, dag_run_mock):
     @mock.patch("airflow.models.DagBag")
     def test_trigger_dag_with_too_early_start_date(self, dag_bag_mock):
         dag_id = "trigger_dag_with_too_early_start_date"
-        dag = DAG(dag_id, default_args={"start_date": timezone.datetime(2016, 9, 5, 10, 10, 0)})
+        dag = DAG(
+            dag_id=dag_id,
+            schedule=None,
+            default_args={"start_date": timezone.datetime(2016, 9, 5, 10, 10, 0)},
+        )
         dag_bag_mock.dags = [dag_id]
         dag_bag_mock.get_dag.return_value = dag
 
@@ -68,7 +72,11 @@ def test_trigger_dag_with_too_early_start_date(self, dag_bag_mock):
     @mock.patch("airflow.models.DagBag")
     def test_trigger_dag_with_valid_start_date(self, dag_bag_mock):
         dag_id = "trigger_dag_with_valid_start_date"
-        dag = DAG(dag_id, default_args={"start_date": timezone.datetime(2016, 9, 5, 10, 10, 0)})
+        dag = DAG(
+            dag_id=dag_id,
+            schedule=None,
+            default_args={"start_date": timezone.datetime(2016, 9, 5, 10, 10, 0)},
+        )
         dag_bag_mock.dags = [dag_id]
         dag_bag_mock.get_dag.return_value = dag
         dag_bag_mock.dags_hash = {}
@@ -88,7 +96,7 @@ def test_trigger_dag_with_valid_start_date(self, dag_bag_mock):
     @mock.patch("airflow.models.DagBag")
     def test_trigger_dag_with_conf(self, dag_bag_mock, conf, expected_conf):
         dag_id = "trigger_dag_with_conf"
-        dag = DAG(dag_id)
+        dag = DAG(dag_id, schedule=None)
         dag_bag_mock.dags = [dag_id]
         dag_bag_mock.get_dag.return_value = dag
 
diff --git a/tests/callbacks/test_callback_requests.py b/tests/callbacks/test_callback_requests.py
index 7153898839682..6d900c8bd3571 100644
--- a/tests/callbacks/test_callback_requests.py
+++ b/tests/callbacks/test_callback_requests.py
@@ -69,7 +69,10 @@ def test_from_json(self, input, request_class):
         if input is None:
             ti = TaskInstance(
                 task=BashOperator(
-                    task_id="test", bash_command="true", dag=DAG(dag_id="id"), start_date=datetime.now()
+                    task_id="test",
+                    bash_command="true",
+                    start_date=datetime.now(),
+                    dag=DAG(dag_id="id", schedule=None),
                 ),
                 run_id="fake_run",
                 state=State.RUNNING,
diff --git a/tests/conftest.py b/tests/conftest.py
index d41ac095c2858..065895d5c4569 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -815,6 +815,7 @@ def dag_maker(request):
     if serialized_marker:
         (want_serialized,) = serialized_marker.args or (True,)
 
+    from airflow.utils.helpers import NOTSET
     from airflow.utils.log.logging_mixin import LoggingMixin
 
     class DagFactory(LoggingMixin):
@@ -927,6 +928,7 @@ def create_dagrun_after(self, dagrun, **kwargs):
         def __call__(
             self,
             dag_id="test_dag",
+            schedule=NOTSET,
             serialized=want_serialized,
             fileloc=None,
             processor_subdir=None,
@@ -955,6 +957,12 @@ def __call__(
                 DEFAULT_DATE = timezone.datetime(2016, 1, 1)
                 self.start_date = DEFAULT_DATE
                 self.kwargs["start_date"] = self.start_date
+            # Set schedule argument to explicitly set value, or a default if no
+            # other scheduling arguments are set.
+            if schedule is not NOTSET:
+                self.kwargs["schedule"] = schedule
+            elif "timetable" not in self.kwargs and "schedule_interval" not in self.kwargs:
+                self.kwargs["schedule"] = timedelta(days=1)
             self.dag = DAG(dag_id, **self.kwargs)
             self.dag.fileloc = fileloc or request.module.__file__
             self.want_serialized = serialized
diff --git a/tests/dag_processing/test_job_runner.py b/tests/dag_processing/test_job_runner.py
index 6aad004cc1762..8112b7222a697 100644
--- a/tests/dag_processing/test_job_runner.py
+++ b/tests/dag_processing/test_job_runner.py
@@ -62,7 +62,13 @@
 from tests.models import TEST_DAGS_FOLDER
 from tests.test_utils.compat import ParseImportError
 from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_callbacks, clear_db_dags, clear_db_runs, clear_db_serialized_dags
+from tests.test_utils.db import (
+    clear_db_callbacks,
+    clear_db_dags,
+    clear_db_import_errors,
+    clear_db_runs,
+    clear_db_serialized_dags,
+)
 
 pytestmark = pytest.mark.db_test
 
@@ -148,7 +154,12 @@ def run_processor_manager_one_loop(self, manager, parent_pipe):
                 return results
         raise RuntimeError("Shouldn't get here - nothing to read, but manager not finished!")
 
+    @pytest.fixture
+    def clear_parse_import_errors(self):
+        clear_db_import_errors()
+
     @pytest.mark.skip_if_database_isolation_mode  # Test is broken in db isolation mode
+    @pytest.mark.usefixtures("clear_parse_import_errors")
     @conf_vars({("core", "load_examples"): "False"})
     def test_remove_file_clears_import_error(self, tmp_path):
         path_to_parse = tmp_path / "temp_dag.py"
diff --git a/tests/dags/test_cli_triggered_dags.py b/tests/dags/test_cli_triggered_dags.py
index 4513ec299f0a3..4dad87c947544 100644
--- a/tests/dags/test_cli_triggered_dags.py
+++ b/tests/dags/test_cli_triggered_dags.py
@@ -39,7 +39,9 @@ def success(ti=None, *args, **kwargs):
 
 # DAG tests that tasks ignore all dependencies
 dag1 = DAG(
-    dag_id="test_run_ignores_all_dependencies", default_args=dict(depends_on_past=True, **default_args)
+    dag_id="test_run_ignores_all_dependencies",
+    schedule=None,
+    default_args={"depends_on_past": True, **default_args},
 )
 dag1_task1 = PythonOperator(task_id="test_run_dependency_task", python_callable=fail, dag=dag1)
 dag1_task2 = PythonOperator(task_id="test_run_dependent_task", python_callable=success, dag=dag1)
diff --git a/tests/dags/test_dagrun_fast_follow.py b/tests/dags/test_dagrun_fast_follow.py
index 3248332902cc0..1053869d81ed4 100644
--- a/tests/dags/test_dagrun_fast_follow.py
+++ b/tests/dags/test_dagrun_fast_follow.py
@@ -31,7 +31,7 @@
 
 dag_id = "test_dagrun_fast_follow"
 
-dag = DAG(dag_id=dag_id, default_args=args)
+dag = DAG(dag_id=dag_id, schedule=None, default_args=args)
 
 # A -> B -> C
 task_a = PythonOperator(task_id="A", dag=dag, python_callable=lambda: True)
diff --git a/tests/dags/test_default_impersonation.py b/tests/dags/test_default_impersonation.py
index 7e3b9806d0f05..468b7dce072dd 100644
--- a/tests/dags/test_default_impersonation.py
+++ b/tests/dags/test_default_impersonation.py
@@ -30,7 +30,7 @@
     "start_date": DEFAULT_DATE,
 }
 
-dag = DAG(dag_id="test_default_impersonation", default_args=args)
+dag = DAG(dag_id="test_default_impersonation", schedule=None, default_args=args)
 
 deelevated_user = "airflow_test_user"
 
diff --git a/tests/dags/test_double_trigger.py b/tests/dags/test_double_trigger.py
index b95a5375c2166..a6b17de6fae39 100644
--- a/tests/dags/test_double_trigger.py
+++ b/tests/dags/test_double_trigger.py
@@ -29,5 +29,5 @@
     "start_date": DEFAULT_DATE,
 }
 
-dag = DAG(dag_id="test_localtaskjob_double_trigger", default_args=args)
+dag = DAG(dag_id="test_localtaskjob_double_trigger", schedule=None, default_args=args)
 task = EmptyOperator(task_id="test_localtaskjob_double_trigger_task", dag=dag)
diff --git a/tests/dags/test_external_task_sensor_check_existense.py b/tests/dags/test_external_task_sensor_check_existense.py
index 60431822a8a5d..05bd82c509f98 100644
--- a/tests/dags/test_external_task_sensor_check_existense.py
+++ b/tests/dags/test_external_task_sensor_check_existense.py
@@ -22,10 +22,18 @@
 from airflow.sensors.external_task import ExternalTaskSensor
 from tests.models import DEFAULT_DATE
 
-with DAG(dag_id="test_external_task_sensor_check_existence_ext", start_date=DEFAULT_DATE) as dag1:
+with DAG(
+    dag_id="test_external_task_sensor_check_existence_ext",
+    schedule=None,
+    start_date=DEFAULT_DATE,
+) as dag1:
     EmptyOperator(task_id="empty")
 
-with DAG(dag_id="test_external_task_sensor_check_existence", start_date=DEFAULT_DATE) as dag2:
+with DAG(
+    dag_id="test_external_task_sensor_check_existence",
+    schedule=None,
+    start_date=DEFAULT_DATE,
+) as dag2:
     ExternalTaskSensor(
         task_id="external_task_sensor",
         external_dag_id="test_external_task_sensor_check_existence_ext",
diff --git a/tests/dags/test_heartbeat_failed_fast.py b/tests/dags/test_heartbeat_failed_fast.py
index d9715eb527ee4..aee7a67030585 100644
--- a/tests/dags/test_heartbeat_failed_fast.py
+++ b/tests/dags/test_heartbeat_failed_fast.py
@@ -29,5 +29,5 @@
     "start_date": DEFAULT_DATE,
 }
 
-dag = DAG(dag_id="test_heartbeat_failed_fast", default_args=args)
+dag = DAG(dag_id="test_heartbeat_failed_fast", default_args=args, schedule=None)
 task = BashOperator(task_id="test_heartbeat_failed_fast_op", bash_command="sleep 7", dag=dag)
diff --git a/tests/dags/test_impersonation.py b/tests/dags/test_impersonation.py
index 470d2748e3620..33a3c89d328d9 100644
--- a/tests/dags/test_impersonation.py
+++ b/tests/dags/test_impersonation.py
@@ -30,7 +30,7 @@
     "start_date": DEFAULT_DATE,
 }
 
-dag = DAG(dag_id="test_impersonation", default_args=args)
+dag = DAG(dag_id="test_impersonation", schedule=None, default_args=args)
 
 run_as_user = "airflow_test_user"
 
diff --git a/tests/dags/test_issue_1225.py b/tests/dags/test_issue_1225.py
index f4312a42c4d8c..96a3ad156269e 100644
--- a/tests/dags/test_issue_1225.py
+++ b/tests/dags/test_issue_1225.py
@@ -40,7 +40,11 @@ def fail():
 
 # DAG tests backfill with pooled tasks
 # Previously backfill would queue the task but never run it
-dag1 = DAG(dag_id="test_backfill_pooled_task_dag", default_args=default_args)
+dag1 = DAG(
+    dag_id="test_backfill_pooled_task_dag",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
 dag1_task1 = EmptyOperator(
     task_id="test_backfill_pooled_task",
     dag=dag1,
@@ -50,7 +54,11 @@ def fail():
 # dag2 has been moved to test_prev_dagrun_dep.py
 
 # DAG tests that a Dag run that doesn't complete is marked failed
-dag3 = DAG(dag_id="test_dagrun_states_fail", default_args=default_args)
+dag3 = DAG(
+    dag_id="test_dagrun_states_fail",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
 dag3_task1 = PythonOperator(task_id="test_dagrun_fail", dag=dag3, python_callable=fail)
 dag3_task2 = EmptyOperator(
     task_id="test_dagrun_succeed",
@@ -59,7 +67,11 @@ def fail():
 dag3_task2.set_upstream(dag3_task1)
 
 # DAG tests that a Dag run that completes but has a failure is marked success
-dag4 = DAG(dag_id="test_dagrun_states_success", default_args=default_args)
+dag4 = DAG(
+    dag_id="test_dagrun_states_success",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
 dag4_task1 = PythonOperator(
     task_id="test_dagrun_fail",
     dag=dag4,
@@ -69,7 +81,11 @@ def fail():
 dag4_task2.set_upstream(dag4_task1)
 
 # DAG tests that a Dag run that completes but has a root failure is marked fail
-dag5 = DAG(dag_id="test_dagrun_states_root_fail", default_args=default_args)
+dag5 = DAG(
+    dag_id="test_dagrun_states_root_fail",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
 dag5_task1 = EmptyOperator(
     task_id="test_dagrun_succeed",
     dag=dag5,
@@ -81,7 +97,11 @@ def fail():
 )
 
 # DAG tests that a Dag run that is deadlocked with no states is failed
-dag6 = DAG(dag_id="test_dagrun_states_deadlock", default_args=default_args)
+dag6 = DAG(
+    dag_id="test_dagrun_states_deadlock",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
dag6_task1 = EmptyOperator(
     task_id="test_depends_on_past",
     depends_on_past=True,
@@ -96,7 +116,11 @@ def fail():
 
 
 # DAG tests that a Dag run that doesn't complete but has a root failure is marked running
-dag8 = DAG(dag_id="test_dagrun_states_root_fail_unfinished", default_args=default_args)
+dag8 = DAG(
+    dag_id="test_dagrun_states_root_fail_unfinished",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
 dag8_task1 = EmptyOperator(
     task_id="test_dagrun_unfinished",  # The test will unset the task instance state after
     # running this test
@@ -109,7 +133,11 @@ def fail():
 )
 
 # DAG tests that a Dag run that completes but has a root in the future is marked as success
-dag9 = DAG(dag_id="test_dagrun_states_root_future", default_args=default_args)
+dag9 = DAG(
+    dag_id="test_dagrun_states_root_future",
+    schedule=timedelta(days=1),
+    default_args=default_args,
+)
 dag9_task1 = EmptyOperator(
     task_id="current",
     dag=dag9,
diff --git a/tests/dags/test_latest_runs.py b/tests/dags/test_latest_runs.py
index 3f36f76ed7c86..9430274713f40 100644
--- a/tests/dags/test_latest_runs.py
+++ b/tests/dags/test_latest_runs.py
@@ -23,5 +23,5 @@
 from airflow.operators.empty import EmptyOperator
 
 for i in range(1, 2):
-    dag = DAG(dag_id=f"test_latest_runs_{i}")
+    dag = DAG(dag_id=f"test_latest_runs_{i}", schedule=None)
     task = EmptyOperator(task_id="dummy_task", dag=dag, owner="airflow", start_date=datetime(2016, 2, 1))
diff --git a/tests/dags/test_mapped_classic.py b/tests/dags/test_mapped_classic.py
index 70e2af7cbc03b..fec7e98a89340 100644
--- a/tests/dags/test_mapped_classic.py
+++ b/tests/dags/test_mapped_classic.py
@@ -32,7 +32,7 @@ def consumer(value):
     print(repr(value))
 
 
-with DAG(dag_id="test_mapped_classic", start_date=datetime.datetime(2022, 1, 1)) as dag:
+with DAG(dag_id="test_mapped_classic", schedule=None, start_date=datetime.datetime(2022, 1, 1)) as dag:
     PythonOperator.partial(task_id="consumer", python_callable=consumer).expand(op_args=make_arg_lists())
     PythonOperator.partial(task_id="consumer_literal", python_callable=consumer).expand(
         op_args=[[1], [2], [3]],
diff --git a/tests/dags/test_mapped_taskflow.py b/tests/dags/test_mapped_taskflow.py
index 4ba29f3cbcb56..61bb7d8048fea 100644
--- a/tests/dags/test_mapped_taskflow.py
+++ b/tests/dags/test_mapped_taskflow.py
@@ -20,7 +20,11 @@
 
 from airflow.models.dag import DAG
 
-with DAG(dag_id="test_mapped_taskflow", start_date=datetime.datetime(2022, 1, 1)) as dag:
+with DAG(
+    dag_id="test_mapped_taskflow",
+    start_date=datetime.datetime(2022, 1, 1),
+    schedule="@daily",
+) as dag:
 
     @dag.task
     def make_list():
diff --git a/tests/dags/test_mark_state.py b/tests/dags/test_mark_state.py
index 71e3b0e430049..da520552e1c0a 100644
--- a/tests/dags/test_mark_state.py
+++ b/tests/dags/test_mark_state.py
@@ -36,7 +36,7 @@
 
 dag_id = "test_mark_state"
 
-dag = DAG(dag_id=dag_id, default_args=args)
+dag = DAG(dag_id=dag_id, schedule=None, default_args=args)
 
 
 def success_callback(context):
diff --git a/tests/dags/test_no_impersonation.py b/tests/dags/test_no_impersonation.py
index c2b04d626049a..2a75d5321473c 100644
--- a/tests/dags/test_no_impersonation.py
+++ b/tests/dags/test_no_impersonation.py
@@ -30,7 +30,7 @@
     "start_date": DEFAULT_DATE,
 }
 
-dag = DAG(dag_id="test_no_impersonation", default_args=args)
+dag = DAG(dag_id="test_no_impersonation", schedule=None, default_args=args)
 
 test_command = textwrap.dedent(
     """\
diff --git a/tests/dags/test_on_failure_callback.py b/tests/dags/test_on_failure_callback.py
index 783ea53d0a261..e2f4ab9027a8c 100644
--- a/tests/dags/test_on_failure_callback.py
+++ b/tests/dags/test_on_failure_callback.py
@@ -31,7 +31,7 @@
     "start_date": DEFAULT_DATE,
 }
 
-dag = DAG(dag_id="test_on_failure_callback", default_args=args)
+dag = DAG(dag_id="test_on_failure_callback", schedule=None, default_args=args)
 
 
 def write_data_to_callback(context):
diff --git a/tests/dags/test_on_kill.py b/tests/dags/test_on_kill.py
index 4e7fe7f5bd299..9b9708bef7d59 100644
--- a/tests/dags/test_on_kill.py
+++ b/tests/dags/test_on_kill.py
@@ -53,6 +53,6 @@ def on_kill(self):
 
 # DAG tests backfill with pooled tasks
 # Previously backfill would queue the task but never run it
-dag1 = DAG(dag_id="test_on_kill", start_date=datetime(2015, 1, 1))
+dag1 = DAG(dag_id="test_on_kill", start_date=datetime(2015, 1, 1), schedule="@daily")
 
 dag1_task1 = DummyWithOnKill(task_id="task1", dag=dag1, owner="airflow")
diff --git a/tests/dags/test_parsing_context.py b/tests/dags/test_parsing_context.py
index 2a88a7829a834..ba3a3491caa3c 100644
--- a/tests/dags/test_parsing_context.py
+++ b/tests/dags/test_parsing_context.py
@@ -48,6 +48,6 @@ def execute(self, context: Context):
         self.log.info("Executed")
 
 
-dag1 = DAG(dag_id="test_parsing_context", start_date=datetime(2015, 1, 1))
+dag1 = DAG(dag_id="test_parsing_context", schedule=None, start_date=datetime(2015, 1, 1))
 
 dag1_task1 = DagWithParsingContext(task_id="task1", dag=dag1, owner="airflow")
diff --git a/tests/dags/test_prev_dagrun_dep.py b/tests/dags/test_prev_dagrun_dep.py
index 52e5f113aed99..8bc357a35a413 100644
--- a/tests/dags/test_prev_dagrun_dep.py
+++ b/tests/dags/test_prev_dagrun_dep.py
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import datetime
+from datetime import datetime, timedelta
 
 from airflow.models.dag import DAG
 from airflow.operators.empty import EmptyOperator
@@ -26,7 +26,7 @@
 default_args = dict(start_date=DEFAULT_DATE, owner="airflow")
 
 # DAG tests depends_on_past dependencies
-dag_dop = DAG(dag_id="test_depends_on_past", default_args=default_args)
+dag_dop = DAG(dag_id="test_depends_on_past", schedule=timedelta(days=1), default_args=default_args)
 with dag_dop:
     dag_dop_task = EmptyOperator(
         task_id="test_dop_task",
@@ -34,7 +34,7 @@
     )
 
 # DAG tests wait_for_downstream dependencies
-dag_wfd = DAG(dag_id="test_wait_for_downstream", default_args=default_args)
+dag_wfd = DAG(dag_id="test_wait_for_downstream", schedule=timedelta(days=1), default_args=default_args)
 with dag_wfd:
     dag_wfd_upstream = EmptyOperator(
         task_id="upstream_task",
diff --git a/tests/dags/test_scheduler_dags.py b/tests/dags/test_scheduler_dags.py
index e1b1bddc85c22..98748c50004d1 100644
--- a/tests/dags/test_scheduler_dags.py
+++ b/tests/dags/test_scheduler_dags.py
@@ -27,10 +27,18 @@
 
 # DAG tests backfill with pooled tasks
 # Previously backfill would queue the task but never run it
-dag1 = DAG(dag_id="test_start_date_scheduling", start_date=timezone.utcnow() + timedelta(days=1))
+dag1 = DAG(
+    dag_id="test_start_date_scheduling",
+    start_date=timezone.utcnow() + timedelta(days=1),
+    schedule=timedelta(days=1),
+)
 dag1_task1 = EmptyOperator(task_id="dummy", dag=dag1, owner="airflow")
 
-dag2 = DAG(dag_id="test_task_start_date_scheduling", start_date=DEFAULT_DATE)
+dag2 = DAG(
+    dag_id="test_task_start_date_scheduling",
+    start_date=DEFAULT_DATE,
+    schedule=timedelta(days=1),
+)
 dag2_task1 = EmptyOperator(
     task_id="dummy1", dag=dag2, owner="airflow", start_date=DEFAULT_DATE + timedelta(days=3)
 )
diff --git a/tests/dags/test_task_view_type_check.py b/tests/dags/test_task_view_type_check.py
index cb81e410fdb24..f3414d4ac3fe5 100644
--- a/tests/dags/test_task_view_type_check.py
+++ b/tests/dags/test_task_view_type_check.py
@@ -51,7 +51,7 @@ def a_function(_, __):
 logger.info("class_instance type: %s", type(class_instance))
 
-dag = DAG(dag_id="test_task_view_type_check", default_args=default_args)
+dag = DAG(dag_id="test_task_view_type_check", schedule=None, default_args=default_args)
 
 dag_task1 = PythonOperator(
     task_id="test_dagrun_functool_partial",
diff --git a/tests/dags/test_zip.zip b/tests/dags/test_zip.zip
index e1a58d27335a9..24db36fff8b00 100644
Binary files a/tests/dags/test_zip.zip and b/tests/dags/test_zip.zip differ
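A pattern worth noting in the hunks above, stated as a reading of the diff rather than anything it spells out: DAGs the tests only ever trigger manually get `schedule=None`, while DAGs whose behavior depends on the scheduler producing consecutive runs (depends_on_past, wait_for_downstream, backfill) get a concrete timetable such as `timedelta(days=1)`. A minimal sketch, with illustrative dag_ids:

from datetime import datetime, timedelta

from airflow.models.dag import DAG

# Only triggered manually (or by a test harness): no timetable needed.
manual_only = DAG(dag_id="manual_only_example", schedule=None, start_date=datetime(2021, 1, 1))

# Exercises scheduler behavior across consecutive data intervals: needs a real schedule.
scheduled = DAG(dag_id="scheduled_example", schedule=timedelta(days=1), start_date=datetime(2021, 1, 1))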
diff --git a/tests/dags_corrupted/test_impersonation_custom.py b/tests/dags_corrupted/test_impersonation_custom.py
index 2af20ce091d9b..03a6e3ef7d277 100644
--- a/tests/dags_corrupted/test_impersonation_custom.py
+++ b/tests/dags_corrupted/test_impersonation_custom.py
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import datetime
+from datetime import datetime, timedelta
 
 # Originally, impersonation tests were incomplete missing the use case when
 # DAGs access custom packages usually made available through the PYTHONPATH environment
@@ -35,7 +35,7 @@
 
 args = {"owner": "airflow", "start_date": DEFAULT_DATE, "run_as_user": "airflow_test_user"}
 
-dag = DAG(dag_id="impersonation_with_custom_pkg", default_args=args)
+dag = DAG(dag_id="impersonation_with_custom_pkg", schedule=timedelta(days=1), default_args=args)
 
 
 def print_today():
diff --git a/tests/dags_with_system_exit/a_system_exit.py b/tests/dags_with_system_exit/a_system_exit.py
index 3255e56823aab..73a433b8ab4b2 100644
--- a/tests/dags_with_system_exit/a_system_exit.py
+++ b/tests/dags_with_system_exit/a_system_exit.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import sys
-from datetime import datetime
+from datetime import datetime, timedelta
 
 from airflow.models.dag import DAG
 
@@ -28,6 +28,6 @@
 
 DEFAULT_DATE = datetime(2100, 1, 1)
 
-dag1 = DAG(dag_id="test_system_exit", start_date=DEFAULT_DATE)
+dag1 = DAG(dag_id="test_system_exit", schedule=timedelta(days=1), start_date=DEFAULT_DATE)
 
 sys.exit(-1)
diff --git a/tests/dags_with_system_exit/b_test_scheduler_dags.py b/tests/dags_with_system_exit/b_test_scheduler_dags.py
index 0cc81c44f8cef..765506f4c32d8 100644
--- a/tests/dags_with_system_exit/b_test_scheduler_dags.py
+++ b/tests/dags_with_system_exit/b_test_scheduler_dags.py
@@ -17,13 +17,13 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import datetime
+from datetime import datetime, timedelta
 
 from airflow.models.dag import DAG
 from airflow.operators.empty import EmptyOperator
 
 DEFAULT_DATE = datetime(2000, 1, 1)
 
-dag1 = DAG(dag_id="exit_test_dag", start_date=DEFAULT_DATE)
+dag1 = DAG(dag_id="exit_test_dag", schedule=timedelta(days=1), start_date=DEFAULT_DATE)
 dag1_task1 = EmptyOperator(task_id="dummy", dag=dag1, owner="airflow")
diff --git a/tests/dags_with_system_exit/c_system_exit.py b/tests/dags_with_system_exit/c_system_exit.py
index 88daf0fe89c3b..299eb4591bea3 100644
--- a/tests/dags_with_system_exit/c_system_exit.py
+++ b/tests/dags_with_system_exit/c_system_exit.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import sys
-from datetime import datetime
+from datetime import datetime, timedelta
 
 from airflow.models.dag import DAG
 
@@ -28,6 +28,6 @@
 
 DEFAULT_DATE = datetime(2100, 1, 1)
 
-dag1 = DAG(dag_id="test_system_exit", start_date=DEFAULT_DATE)
+dag1 = DAG(dag_id="test_system_exit", schedule=timedelta(days=1), start_date=DEFAULT_DATE)
 
 sys.exit(-1)
diff --git a/tests/decorators/test_mapped.py b/tests/decorators/test_mapped.py
index 5a90527987302..3812367425f8b 100644
--- a/tests/decorators/test_mapped.py
+++ b/tests/decorators/test_mapped.py
@@ -26,7 +26,7 @@ def test_mapped_task_group_id_prefix_task_id():
     def f(z):
         pass
 
-    with DAG(dag_id="d", start_date=DEFAULT_DATE) as dag:
+    with DAG(dag_id="d", schedule=None, start_date=DEFAULT_DATE) as dag:
         x1 = dag.task(task_id="t1")(f).expand(z=[])
         with TaskGroup("g"):
             x2 = dag.task(task_id="t2")(f).expand(z=[])
diff --git a/tests/decorators/test_python.py b/tests/decorators/test_python.py
index 9d2b9a14c82b4..60d18fbddc601 100644
--- a/tests/decorators/test_python.py
+++ b/tests/decorators/test_python.py
@@ -686,7 +686,7 @@ def print_info(m1: str, m2: str, run_id: str = "") -> None:
         def print_everything(**kwargs) -> None:
             print(kwargs)
 
-        with DAG("test_mapped_decorator", start_date=DEFAULT_DATE):
+        with DAG("test_mapped_decorator", schedule=None, start_date=DEFAULT_DATE):
             t0 = print_info.expand(m1=["a", "b"], m2={"foo": "bar"})
             t1 = print_info.partial(m1="hi").expand(m2=[1, 2, 3])
             t2 = print_everything.partial(whatever="123").expand(any_key=[1, 2], works=t1)
@@ -722,7 +722,7 @@ def product(number: int, multiple: int):
 
         literal = [1, 2, 3]
 
-        with DAG("test_dag", start_date=DEFAULT_DATE) as dag:
+        with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag:
             quadrupled = product.partial(multiple=3).expand(number=literal)
             doubled = product.partial(multiple=2).expand(number=literal)
             trippled = product.partial(multiple=3).expand(number=literal)
diff --git a/tests/decorators/test_task_group.py b/tests/decorators/test_task_group.py
index 709a9135f56dc..6120f94af3ac7 100644
--- a/tests/decorators/test_task_group.py
+++ b/tests/decorators/test_task_group.py
@@ -63,7 +63,7 @@ def simple_tg(): ...
 def test_tooltip_derived_from_function_docstring():
     """Test that the tooltip for TaskGroup is the decorated-function's docstring."""
 
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         @task_group()
         def tg():
@@ -82,7 +82,7 @@ def test_tooltip_not_overridden_by_function_docstring():
     docstring.
     """
 
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         @task_group(tooltip="tooltip for the TaskGroup")
         def tg():
@@ -102,7 +102,7 @@ def test_partial_evolves_factory():
     def tg(a, b):
         pass
 
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         nonlocal tgp
         tgp = tg.partial(a=1)
@@ -120,7 +120,7 @@ def pipeline():
 
 
 def test_expand_fail_empty():
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         @task_group()
         def tg():
@@ -136,7 +136,7 @@ def tg():
 def test_expand_create_mapped():
     saved = {}
 
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         @task_group()
         def tg(a, b):
@@ -155,7 +155,7 @@ def tg(a, b):
 
 
 def test_expand_kwargs_no_wildcard():
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         @task_group()
         def tg(**kwargs):
@@ -172,7 +172,7 @@ def tg(**kwargs):
 def test_expand_kwargs_create_mapped():
     saved = {}
 
-    @dag(start_date=pendulum.datetime(2022, 1, 1))
+    @dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
     def pipeline():
         @task_group()
         def tg(a, b):
@@ -241,6 +241,7 @@ def t2():
 def test_override_dag_default_args():
     @dag(
         dag_id="test_dag",
+        schedule=None,
         start_date=pendulum.parse("20200101"),
         default_args={
             "retries": 1,
@@ -270,6 +271,7 @@ def tg():
 def test_override_dag_default_args_nested_tg():
     @dag(
         dag_id="test_dag",
+        schedule=None,
         start_date=pendulum.parse("20200101"),
         default_args={
             "retries": 1,
diff --git a/tests/integration/executors/test_celery_executor.py b/tests/integration/executors/test_celery_executor.py
index 03a43cc5ebef2..9c7fe96ff186d 100644
--- a/tests/integration/executors/test_celery_executor.py
+++ b/tests/integration/executors/test_celery_executor.py
@@ -210,7 +210,10 @@ def fake_execute_command():
         # which will cause TypeError when calling task.apply_async()
         executor = celery_executor.CeleryExecutor()
         task = BashOperator(
-            task_id="test", bash_command="true", dag=DAG(dag_id="id"), start_date=datetime.now()
+            task_id="test",
+            bash_command="true",
+            dag=DAG(dag_id="id", schedule=None),
+            start_date=datetime.now(),
         )
         when = datetime.now()
         value_tuple = (
@@ -241,7 +244,10 @@ def test_retry_on_error_sending_task(self, caplog):
         assert executor.task_publish_max_retries == 3, "Assert Default Max Retries is 3"
 
         task = BashOperator(
-            task_id="test", bash_command="true", dag=DAG(dag_id="id"), start_date=datetime.now()
+            task_id="test",
+            bash_command="true",
+            dag=DAG(dag_id="id", schedule=None),
+            start_date=datetime.now(),
         )
         when = datetime.now()
         value_tuple = (
diff --git a/tests/integration/providers/redis/operators/test_redis_publish.py b/tests/integration/providers/redis/operators/test_redis_publish.py
index c5ea8a65bd626..76fd02f02c0d4 100644
--- a/tests/integration/providers/redis/operators/test_redis_publish.py
+++ b/tests/integration/providers/redis/operators/test_redis_publish.py
@@ -34,7 +34,7 @@ class TestRedisPublishOperator:
     def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}
 
-        self.dag = DAG("test_redis_dag_id", default_args=args)
+        self.dag = DAG("test_redis_dag_id", schedule=None, default_args=args)
 
         self.mock_context = MagicMock()
         self.channel = "test"
diff --git a/tests/jobs/test_triggerer_job.py b/tests/jobs/test_triggerer_job.py
index 28fc00694b400..378afa0499ca4 100644
--- a/tests/jobs/test_triggerer_job.py
+++ b/tests/jobs/test_triggerer_job.py
@@ -90,7 +90,7 @@ def session():
 
 def create_trigger_in_db(session, trigger, operator=None):
     dag_model = DagModel(dag_id="test_dag")
-    dag = DAG(dag_id=dag_model.dag_id, start_date=pendulum.datetime(2023, 1, 1))
+    dag = DAG(dag_id=dag_model.dag_id, schedule="@daily", start_date=pendulum.datetime(2023, 1, 1))
     run = DagRun(
         dag_id=dag_model.dag_id,
         run_id="test_run",
diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py
index 89b268af1f8b1..48aaf2699b918 100644
--- a/tests/models/test_baseoperator.py
+++ b/tests/models/test_baseoperator.py
@@ -186,10 +186,16 @@ def test_trigger_rule_validation(self):
         from airflow.models.abstractoperator import DEFAULT_TRIGGER_RULE
 
         fail_stop_dag = DAG(
-            dag_id="test_dag_trigger_rule_validation", start_date=DEFAULT_DATE, fail_stop=True
+            dag_id="test_dag_trigger_rule_validation",
+            schedule=None,
+            start_date=DEFAULT_DATE,
+            fail_stop=True,
         )
         non_fail_stop_dag = DAG(
-            dag_id="test_dag_trigger_rule_validation", start_date=DEFAULT_DATE, fail_stop=False
+            dag_id="test_dag_trigger_rule_validation",
+            schedule=None,
+            start_date=DEFAULT_DATE,
+            fail_stop=False,
         )
 
         # An operator with default trigger rule and a fail-stop dag should be allowed
@@ -305,7 +311,7 @@ def test_render_template(self, content, context, expected_output):
     )
     def test_render_template_with_native_envs(self, content, context, expected_output):
         """Test render_template given various input types with Native Python types"""
-        with DAG("test-dag", start_date=DEFAULT_DATE, render_template_as_native_obj=True):
+        with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE, render_template_as_native_obj=True):
             task = BaseOperator(task_id="op1")
         result = task.render_template(content, context)
@@ -320,7 +326,12 @@ def __init__(self, x, **kwargs):
             def execute(self, context):
                 print(self.x)
 
-        with DAG("test-dag", start_date=DEFAULT_DATE, default_args=dict(sla=timedelta(minutes=30))) as dag:
+        with DAG(
+            dag_id="test-dag",
+            schedule=None,
+            start_date=DEFAULT_DATE,
+            default_args={"sla": timedelta(minutes=30)},
+        ) as dag:
 
             @dag.task
             def get_values():
@@ -331,7 +342,12 @@ def get_values():
             MyOp.partial(task_id="hi").expand(x=task1)
 
     def test_mapped_dag_slas_disabled_taskflow(self):
-        with DAG("test-dag", start_date=DEFAULT_DATE, default_args=dict(sla=timedelta(minutes=30))) as dag:
+        with DAG(
+            dag_id="test-dag",
+            schedule=None,
+            start_date=DEFAULT_DATE,
+            default_args={"sla": timedelta(minutes=30)},
+        ) as dag:
 
             @dag.task
             def get_values():
@@ -467,7 +483,7 @@ def test_email_on_actions(self):
 
     def test_cross_downstream(self):
         """Test if all dependencies between tasks are all set correctly."""
-        dag = DAG(dag_id="test_dag", start_date=datetime.now())
+        dag = DAG(dag_id="test_dag", schedule=None, start_date=datetime.now())
         start_tasks = [BaseOperator(task_id=f"t{i}", dag=dag) for i in range(1, 4)]
         end_tasks = [BaseOperator(task_id=f"t{i}", dag=dag) for i in range(4, 7)]
         cross_downstream(from_tasks=start_tasks, to_tasks=end_tasks)
@@ -492,7 +508,7 @@ def test_cross_downstream(self):
         }
 
     def test_chain(self):
-        dag = DAG(dag_id="test_chain", start_date=datetime.now())
+        dag = DAG(dag_id="test_chain", schedule=None, start_date=datetime.now())
 
         # Begin test for classic operators with `EdgeModifiers`
         [label1, label2] = [Label(label=f"label{i}") for i in range(1, 3)]
@@ -549,7 +565,7 @@ def test_chain(self):
         assert [op2] == tgop4.get_direct_relatives(upstream=False)
 
     def test_chain_linear(self):
-        dag = DAG(dag_id="test_chain_linear", start_date=datetime.now())
+        dag = DAG(dag_id="test_chain_linear", schedule=None, start_date=datetime.now())
 
         t1, t2, t3, t4, t5, t6, t7 = (BaseOperator(task_id=f"t{i}", dag=dag) for i in range(1, 8))
         chain_linear(t1, [t2, t3, t4], [t5, t6], t7)
@@ -598,7 +614,7 @@ def test_chain_linear(self):
             chain_linear(t1)
 
     def test_chain_not_support_type(self):
-        dag = DAG(dag_id="test_chain", start_date=datetime.now())
+        dag = DAG(dag_id="test_chain", schedule=None, start_date=datetime.now())
         [op1, op2] = [BaseOperator(task_id=f"t{i}", dag=dag) for i in range(1, 3)]
         with pytest.raises(TypeError):
             chain([op1, op2], 1)
@@ -623,7 +639,7 @@ def test_chain_not_support_type(self):
             chain([tg1, tg2], 1)
 
     def test_chain_different_length_iterable(self):
-        dag = DAG(dag_id="test_chain", start_date=datetime.now())
+        dag = DAG(dag_id="test_chain", schedule=None, start_date=datetime.now())
         [label1, label2] = [Label(label=f"label{i}") for i in range(1, 3)]
         [op1, op2, op3, op4, op5] = [BaseOperator(task_id=f"t{i}", dag=dag) for i in range(1, 6)]
 
@@ -658,7 +674,7 @@ def test_lineage_composition(self):
         """
         inlet = File(url="in")
         outlet = File(url="out")
-        dag = DAG("test-dag", start_date=DEFAULT_DATE)
+        dag = DAG("test-dag", schedule=None, start_date=DEFAULT_DATE)
         task1 = BaseOperator(task_id="op1", dag=dag)
         task2 = BaseOperator(task_id="op2", dag=dag)
 
@@ -744,7 +760,7 @@ def test_setattr_performs_no_custom_action_at_execute_time(self):
         assert method_mock.call_count == 0
 
     def test_upstream_is_set_when_template_field_is_xcomarg(self):
-        with DAG("xcomargs_test", default_args={"start_date": datetime.today()}):
+        with DAG("xcomargs_test", schedule=None, default_args={"start_date": datetime.today()}):
             op1 = BaseOperator(task_id="op1")
             op2 = MockOperator(task_id="op2", arg1=op1.output)
 
@@ -752,7 +768,7 @@ def test_upstream_is_set_when_template_field_is_xcomarg(self):
         assert op2 in op1.downstream_list
 
     def test_set_xcomargs_dependencies_works_recursively(self):
-        with DAG("xcomargs_test", default_args={"start_date": datetime.today()}):
+        with DAG("xcomargs_test", schedule=None, default_args={"start_date": datetime.today()}):
             op1 = BaseOperator(task_id="op1")
             op2 = BaseOperator(task_id="op2")
             op3 = MockOperator(task_id="op3", arg1=[op1.output, op2.output])
@@ -764,7 +780,7 @@ def test_set_xcomargs_dependencies_works_recursively(self):
         assert op2 in op4.upstream_list
 
     def test_set_xcomargs_dependencies_works_when_set_after_init(self):
-        with DAG(dag_id="xcomargs_test", default_args={"start_date": datetime.today()}):
+        with DAG(dag_id="xcomargs_test", schedule=None, default_args={"start_date": datetime.today()}):
             op1 = BaseOperator(task_id="op1")
             op2 = MockOperator(task_id="op2")
             op2.arg1 = op1.output  # value is set after init
@@ -928,7 +944,7 @@ def test_task_level_retry_delay(dag_maker):
 
 def test_deepcopy():
     # Test bug when copying an operator attached to a DAG
-    with DAG("dag0", start_date=DEFAULT_DATE) as dag:
+    with DAG("dag0", schedule=None, start_date=DEFAULT_DATE) as dag:
 
         @dag.task
         def task0():
@@ -1092,7 +1108,7 @@ def test_get_task_instances(session):
     second_execution_date = pendulum.datetime(2023, 1, 2)
     third_execution_date = pendulum.datetime(2023, 1, 3)
 
-    test_dag = DAG(dag_id="test_dag", start_date=first_execution_date)
+    test_dag = DAG(dag_id="test_dag", schedule=None, start_date=first_execution_date)
     task = BaseOperator(task_id="test_task", dag=test_dag)
 
     common_dr_kwargs = {
diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py
index f13a80d1eb888..706a1c6bd677f 100644
--- a/tests/models/test_cleartasks.py
+++ b/tests/models/test_cleartasks.py
@@ -631,6 +631,7 @@ def test_dags_clear(self):
         for i in range(num_of_dags):
             dag = DAG(
                 f"test_dag_clear_{i}",
+                schedule=datetime.timedelta(days=1),
                 start_date=DEFAULT_DATE,
                 end_date=DEFAULT_DATE + datetime.timedelta(days=10),
             )
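The tests/decorators hunks above apply the same rule to decorator-style DAGs. A minimal self-contained sketch follows; the `pipeline_example`/`noop` names are illustrative, not identifiers from the diff:

import pendulum

from airflow.decorators import dag, task

@dag(schedule=None, start_date=pendulum.datetime(2022, 1, 1))
def pipeline_example():
    @task
    def noop():
        pass

    noop()

# Calling the decorated function registers the DAG, exactly as in the tests above.
pipeline_example()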
""" - dag = DAG("test-dag") + dag = DAG("test-dag", schedule=None) assert isinstance(dag.params, ParamsDict) assert 0 == len(dag.params) @@ -197,7 +197,7 @@ def test_params_passed_and_params_in_default_args_no_override(self): params1 = {"parameter1": 1} params2 = {"parameter2": 2} - dag = DAG("test-dag", default_args={"params": params1}, params=params2) + dag = DAG("test-dag", schedule=None, default_args={"params": params1}, params=params2) assert params1["parameter1"] == dag.params["parameter1"] assert params2["parameter2"] == dag.params["parameter2"] @@ -210,20 +210,20 @@ def test_not_none_schedule_with_non_default_params(self): params = {"param1": Param(type="string")} with pytest.raises(AirflowException): - DAG("dummy-dag", params=params) + DAG("dummy-dag", schedule=timedelta(days=1), start_date=DEFAULT_DATE, params=params) def test_dag_invalid_default_view(self): """ Test invalid `default_view` of DAG initialization """ with pytest.raises(AirflowException, match="Invalid values of dag.default_view: only support"): - DAG(dag_id="test-invalid-default_view", default_view="airflow") + DAG(dag_id="test-invalid-default_view", schedule=None, default_view="airflow") def test_dag_default_view_default_value(self): """ Test `default_view` default value of DAG initialization """ - dag = DAG(dag_id="test-default_default_view") + dag = DAG(dag_id="test-default_default_view", schedule=None) assert conf.get("webserver", "dag_default_view").lower() == dag.default_view def test_dag_invalid_orientation(self): @@ -231,13 +231,13 @@ def test_dag_invalid_orientation(self): Test invalid `orientation` of DAG initialization """ with pytest.raises(AirflowException, match="Invalid values of dag.orientation: only support"): - DAG(dag_id="test-invalid-orientation", orientation="airflow") + DAG(dag_id="test-invalid-orientation", schedule=None, orientation="airflow") def test_dag_orientation_default_value(self): """ Test `orientation` default value of DAG initialization """ - dag = DAG(dag_id="test-default_orientation") + dag = DAG(dag_id="test-default_orientation", schedule=None) assert conf.get("webserver", "dag_orientation") == dag.orientation def test_dag_as_context_manager(self): @@ -246,8 +246,8 @@ def test_dag_as_context_manager(self): When used as a context manager, Operators are automatically added to the DAG (unless they specify a different DAG) """ - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) - dag2 = DAG("dag2", start_date=DEFAULT_DATE, default_args={"owner": "owner2"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag2 = DAG("dag2", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner2"}) with dag: op1 = EmptyOperator(task_id="op1") @@ -274,7 +274,7 @@ def test_dag_as_context_manager(self): assert op4.owner == "owner2" assert op5.owner == "owner1" - with DAG("creating_dag_in_cm", start_date=DEFAULT_DATE) as dag: + with DAG("creating_dag_in_cm", schedule=None, start_date=DEFAULT_DATE) as dag: EmptyOperator(task_id="op6") assert dag.dag_id == "creating_dag_in_cm" @@ -292,15 +292,15 @@ def test_dag_as_context_manager(self): assert op9.dag == dag2 def test_dag_topological_sort_dag_without_tasks(self): - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) assert () == dag.topological_sort() def test_dag_naive_start_date_string(self): - DAG("DAG", default_args={"start_date": "2019-06-01"}) + 
DAG("DAG", schedule=None, default_args={"start_date": "2019-06-01"}) def test_dag_naive_start_end_dates_strings(self): - DAG("DAG", default_args={"start_date": "2019-06-01", "end_date": "2019-06-05"}) + DAG("DAG", schedule=None, default_args={"start_date": "2019-06-01", "end_date": "2019-06-05"}) def test_dag_start_date_propagates_to_end_date(self): """ @@ -314,15 +314,17 @@ def test_dag_start_date_propagates_to_end_date(self): An explicit check the `tzinfo` attributes for both are the same is an extra check. """ dag = DAG( - "DAG", default_args={"start_date": "2019-06-05T00:00:00+05:00", "end_date": "2019-06-05T00:00:00"} + "DAG", + schedule=None, + default_args={"start_date": "2019-06-05T00:00:00+05:00", "end_date": "2019-06-05T00:00:00"}, ) assert dag.default_args["start_date"] == dag.default_args["end_date"] assert dag.default_args["start_date"].tzinfo == dag.default_args["end_date"].tzinfo def test_dag_naive_default_args_start_date(self): - dag = DAG("DAG", default_args={"start_date": datetime.datetime(2018, 1, 1)}) + dag = DAG("DAG", schedule=None, default_args={"start_date": datetime.datetime(2018, 1, 1)}) assert dag.timezone == settings.TIMEZONE - dag = DAG("DAG", start_date=datetime.datetime(2018, 1, 1)) + dag = DAG("DAG", schedule=None, start_date=datetime.datetime(2018, 1, 1)) assert dag.timezone == settings.TIMEZONE def test_dag_none_default_args_start_date(self): @@ -330,7 +332,7 @@ def test_dag_none_default_args_start_date(self): Tests if a start_date of None in default_args works. """ - dag = DAG("DAG", default_args={"start_date": None}) + dag = DAG("DAG", schedule=None, default_args={"start_date": None}) assert dag.timezone == settings.TIMEZONE def test_dag_task_priority_weight_total(self): @@ -341,7 +343,7 @@ def test_dag_task_priority_weight_total(self): # Fully connected parallel tasks. i.e. every task at each parallel # stage is dependent on every task in the previous stage. 
@@ -341,7 +343,7 @@ def test_dag_task_priority_weight_total(self):
         # Fully connected parallel tasks. i.e. every task at each parallel
         # stage is dependent on every task in the previous stage.
         # Default weight should be calculated using downstream descendants
-        with DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) as dag:
+        with DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) as dag:
             pipeline = [
                 [EmptyOperator(task_id=f"stage{i}.{j}", priority_weight=weight) for j in range(width)]
                 for i in range(depth)
@@ -365,7 +367,7 @@ def test_dag_task_priority_weight_total_using_upstream(self):
         width = 5
         depth = 5
         pattern = re.compile("stage(\\d*).(\\d*)")
-        with DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) as dag:
+        with DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) as dag:
             pipeline = [
                 [
                     EmptyOperator(
@@ -395,7 +397,7 @@ def test_dag_task_priority_weight_total_using_absolute(self):
         weight = 10
         width = 5
         depth = 5
-        with DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) as dag:
+        with DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) as dag:
             pipeline = [
                 [
                     EmptyOperator(
@@ -419,7 +421,7 @@ def test_dag_task_priority_weight_total_using_absolute(self):
 
     def test_dag_task_invalid_weight_rule(self):
         # Test if we enter an invalid weight rule
-        with DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}):
+        with DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}):
             with pytest.raises(AirflowException):
                 EmptyOperator(task_id="should_fail", weight_rule="no rule")
 
@@ -432,7 +434,7 @@ def test_dag_task_invalid_weight_rule(self):
     )
     def test_dag_task_custom_weight_strategy(self, cls, expected):
         with mock_plugin_manager(plugins=[TestPriorityWeightStrategyPlugin]), DAG(
-            "dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}
+            "dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}
         ) as dag:
             task = EmptyOperator(
                 task_id="empty_task",
@@ -446,7 +448,7 @@ def test_dag_task_custom_weight_strategy(self, cls, expected):
 
     def test_dag_task_not_registered_weight_strategy(self):
         with mock_plugin_manager(plugins=[TestPriorityWeightStrategyPlugin]), DAG(
-            "dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}
+            "dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}
         ):
             with pytest.raises(AirflowException, match="Unknown priority strategy"):
                 EmptyOperator(
@@ -458,7 +460,7 @@ def test_get_num_task_instances(self):
         test_dag_id = "test_get_num_task_instances_dag"
         test_task_id = "task_1"
 
-        test_dag = DAG(dag_id=test_dag_id, start_date=DEFAULT_DATE)
+        test_dag = DAG(dag_id=test_dag_id, schedule=None, start_date=DEFAULT_DATE)
         test_task = EmptyOperator(task_id=test_task_id, dag=test_dag)
 
         dr1 = test_dag.create_dagrun(
@@ -535,7 +537,7 @@ def test_get_task_instances_before(self):
         test_dag_id = "test_get_task_instances_before"
         test_task_id = "the_task"
 
-        test_dag = DAG(dag_id=test_dag_id, start_date=BASE_DATE)
+        test_dag = DAG(dag_id=test_dag_id, schedule=None, start_date=BASE_DATE)
         EmptyOperator(task_id=test_task_id, dag=test_dag)
 
         session = settings.Session()
@@ -654,6 +656,7 @@ def jinja_udf(name):
 
         dag = DAG(
             "test-dag",
+            schedule=None,
             start_date=DEFAULT_DATE,
             user_defined_filters={"hello": jinja_udf},
             user_defined_macros={"foo": "bar"},
@@ -665,7 +668,11 @@ def jinja_udf(name):
         assert jinja_env.globals["foo"] == "bar"
 
     def test_set_jinja_env_additional_option(self):
-        dag = DAG("test-dag", jinja_environment_kwargs={"keep_trailing_newline": True, "cache_size": 50})
+        dag = DAG(
+            dag_id="test-dag",
+            schedule=None,
+            jinja_environment_kwargs={"keep_trailing_newline": True, "cache_size": 50},
+        )
         jinja_env = dag.get_template_env()
         assert jinja_env.keep_trailing_newline is True
         assert jinja_env.cache.capacity == 50
@@ -673,7 +680,7 @@ def test_set_jinja_env_additional_option(self):
         assert jinja_env.undefined is jinja2.StrictUndefined
 
     def test_template_undefined(self):
-        dag = DAG("test-dag", template_undefined=jinja2.Undefined)
+        dag = DAG("test-dag", schedule=None, template_undefined=jinja2.Undefined)
         jinja_env = dag.get_template_env()
         assert jinja_env.undefined is jinja2.Undefined
 
@@ -687,7 +694,7 @@ def test_template_undefined(self):
         ],
     )
     def test_template_env(self, use_native_obj, force_sandboxed, expected_env):
-        dag = DAG("test-dag", render_template_as_native_obj=use_native_obj)
+        dag = DAG("test-dag", schedule=None, render_template_as_native_obj=use_native_obj)
         jinja_env = dag.get_template_env(force_sandboxed=force_sandboxed)
         assert isinstance(jinja_env, expected_env)
 
@@ -695,7 +702,12 @@ def test_resolve_template_files_value(self, tmp_path):
         path = tmp_path / "testfile.template"
         path.write_text("{{ ds }}")
 
-        with DAG("test-dag", start_date=DEFAULT_DATE, template_searchpath=os.fspath(path.parent)):
+        with DAG(
+            dag_id="test-dag",
+            schedule=None,
+            start_date=DEFAULT_DATE,
+            template_searchpath=os.fspath(path.parent),
+        ):
             task = EmptyOperator(task_id="op1")
             task.test_field = path.name
 
@@ -709,7 +721,12 @@ def test_resolve_template_files_list(self, tmp_path):
         path = tmp_path / "testfile.template"
         path.write_text("{{ ds }}")
 
-        with DAG("test-dag", start_date=DEFAULT_DATE, template_searchpath=os.fspath(path.parent)):
+        with DAG(
+            dag_id="test-dag",
+            schedule=None,
+            start_date=DEFAULT_DATE,
+            template_searchpath=os.fspath(path.parent),
+        ):
             task = EmptyOperator(task_id="op1")
             task.test_field = [path.name, "some_string"]
 
@@ -923,7 +940,7 @@ def test_following_schedule_datetime_timezone(self):
 
     def test_create_dagrun_when_schedule_is_none_and_empty_start_date(self):
         # Check that we don't get an AttributeError 'start_date' for self.start_date when schedule is none
-        dag = DAG("dag_with_none_schedule_and_empty_start_date")
+        dag = DAG("dag_with_none_schedule_and_empty_start_date", schedule=None)
         dag.add_task(BaseOperator(task_id="task_without_start_date"))
         dagrun = dag.create_dagrun(
             state=State.RUNNING,
@@ -968,7 +985,7 @@ def tzname(self, dt):
 
     def test_dagtag_repr(self):
         clear_db_dags()
-        dag = DAG("dag-test-dagtag", start_date=DEFAULT_DATE, tags=["tag-1", "tag-2"])
+        dag = DAG("dag-test-dagtag", schedule=None, start_date=DEFAULT_DATE, tags=["tag-1", "tag-2"])
         dag.sync_to_db()
         with create_session() as session:
             assert {"tag-1", "tag-2"} == {
@@ -977,7 +994,10 @@ def test_dagtag_repr(self):
 
     def test_bulk_write_to_db(self):
         clear_db_dags()
-        dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(4)]
+        dags = [
+            DAG(f"dag-bulk-sync-{i}", schedule=None, start_date=DEFAULT_DATE, tags=["test-dag"])
+            for i in range(4)
+        ]
 
         with assert_queries_count(5):
             DAG.bulk_write_to_db(dags)
@@ -1057,7 +1077,10 @@ def test_bulk_write_to_db_single_dag(self):
         Test bulk_write_to_db for a single dag using the index optimized query
         """
         clear_db_dags()
-        dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(1)]
+        dags = [
+            DAG(f"dag-bulk-sync-{i}", schedule=None, start_date=DEFAULT_DATE, tags=["test-dag"])
+            for i in range(1)
+        ]
 
         with assert_queries_count(5):
             DAG.bulk_write_to_db(dags)
@@ -1081,7 +1104,10 @@ def test_bulk_write_to_db_multiple_dags(self):
         Test bulk_write_to_db for multiple dags which does not use the index optimized query
         """
         clear_db_dags()
-        dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(4)]
+        dags = [
+            DAG(f"dag-bulk-sync-{i}", schedule=None, start_date=DEFAULT_DATE, tags=["test-dag"])
+            for i in range(4)
+        ]
 
         with assert_queries_count(5):
             DAG.bulk_write_to_db(dags)
@@ -1125,7 +1151,11 @@ def test_bulk_write_to_db_max_active_runs(self, state):
         Test that DagModel.next_dagrun_create_after is set to NULL when the dag cannot be created due to max
         active runs being hit.
         """
-        dag = DAG(dag_id="test_scheduler_verify_max_active_runs", start_date=DEFAULT_DATE)
+        dag = DAG(
+            dag_id="test_scheduler_verify_max_active_runs",
+            schedule=timedelta(days=1),
+            start_date=DEFAULT_DATE,
+        )
         dag.max_active_runs = 1
 
         EmptyOperator(task_id="dummy", dag=dag, owner="airflow")
@@ -1161,7 +1191,7 @@ def test_bulk_write_to_db_has_import_error(self):
         """
         Test that DagModel.has_import_error is set to false if no import errors.
         """
-        dag = DAG(dag_id="test_has_import_error", start_date=DEFAULT_DATE)
+        dag = DAG(dag_id="test_has_import_error", schedule=None, start_date=DEFAULT_DATE)
 
         EmptyOperator(task_id="dummy", dag=dag, owner="airflow")
 
@@ -1201,7 +1231,7 @@ def test_bulk_write_to_db_datasets(self):
         d3 = Dataset("s3://dataset/3")
         dag1 = DAG(dag_id=dag_id1, start_date=DEFAULT_DATE, schedule=[d1])
         EmptyOperator(task_id=task_id, dag=dag1, outlets=[d2, d3])
-        dag2 = DAG(dag_id=dag_id2, start_date=DEFAULT_DATE)
+        dag2 = DAG(dag_id=dag_id2, start_date=DEFAULT_DATE, schedule=None)
         EmptyOperator(task_id=task_id, dag=dag2, outlets=[Dataset(uri1, extra={"should": "be used"})])
         session = settings.Session()
         dag1.clear()
@@ -1234,7 +1264,7 @@ def test_bulk_write_to_db_datasets(self):
         # so let's remove some references and see what happens
         dag1 = DAG(dag_id=dag_id1, start_date=DEFAULT_DATE, schedule=None)
         EmptyOperator(task_id=task_id, dag=dag1, outlets=[d2])
-        dag2 = DAG(dag_id=dag_id2, start_date=DEFAULT_DATE)
+        dag2 = DAG(dag_id=dag_id2, start_date=DEFAULT_DATE, schedule=None)
         EmptyOperator(task_id=task_id, dag=dag2)
         DAG.bulk_write_to_db([dag1, dag2], session=session)
         session.commit()
@@ -1331,10 +1361,7 @@ def test_bulk_write_to_db_dataset_aliases(self):
         assert len(stored_dataset_aliases) == 3
 
     def test_sync_to_db(self):
-        dag = DAG(
-            "dag",
-            start_date=DEFAULT_DATE,
-        )
+        dag = DAG("dag", start_date=DEFAULT_DATE, schedule=None)
         with dag:
             EmptyOperator(task_id="task", owner="owner1")
             EmptyOperator(task_id="task2", owner="owner2")
@@ -1350,11 +1377,7 @@ def test_sync_to_db(self):
         session.close()
 
     def test_sync_to_db_default_view(self):
-        dag = DAG(
-            "dag",
-            start_date=DEFAULT_DATE,
-            default_view="graph",
-        )
+        dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_view="graph")
         with dag:
             EmptyOperator(task_id="task", owner="owner1")
         session = settings.Session()
@@ -1366,17 +1389,17 @@ def test_sync_to_db_default_view(self):
         session.close()
 
     def test_existing_dag_is_paused_upon_creation(self):
-        dag = DAG("dag_paused")
+        dag = DAG("dag_paused", schedule=None)
         dag.sync_to_db()
         assert not dag.get_is_paused()
 
-        dag = DAG("dag_paused", is_paused_upon_creation=True)
+        dag = DAG("dag_paused", schedule=None, is_paused_upon_creation=True)
         dag.sync_to_db()
         # Since the dag existed before, it should not follow the pause flag upon creation
         assert not dag.get_is_paused()
 
     def test_new_dag_is_paused_upon_creation(self):
-        dag = DAG("new_nonexisting_dag",
is_paused_upon_creation=True) + dag = DAG("new_nonexisting_dag", schedule=None, is_paused_upon_creation=True) session = settings.Session() dag.sync_to_db(session=session) @@ -1395,10 +1418,10 @@ def test_existing_dag_is_paused_config(self): # config should be set properly assert conf.getint("core", "max_consecutive_failed_dag_runs_per_dag") == 4 # checking the default value is coming from config - dag = DAG("test_dag") + dag = DAG("test_dag", schedule=None) assert dag.max_consecutive_failed_dag_runs == 4 # but we can override the value using params - dag = DAG("test_dag2", max_consecutive_failed_dag_runs=2) + dag = DAG("test_dag2", schedule=None, max_consecutive_failed_dag_runs=2) assert dag.max_consecutive_failed_dag_runs == 2 def test_existing_dag_is_paused_after_limit(self): @@ -1415,7 +1438,7 @@ def add_failed_dag_run(id, execution_date): dr.update_state(session=session) dag_id = "dag_paused_after_limit" - dag = DAG(dag_id, is_paused_upon_creation=False, max_consecutive_failed_dag_runs=2) + dag = DAG(dag_id, schedule=None, is_paused_upon_creation=False, max_consecutive_failed_dag_runs=2) op1 = BashOperator(task_id="task", bash_command="exit 1;") dag.add_task(op1) session = settings.Session() @@ -1443,10 +1466,7 @@ def test_existing_dag_default_view(self): def test_dag_is_deactivated_upon_dagfile_deletion(self): dag_id = "old_existing_dag" dag_fileloc = "/usr/local/airflow/dags/non_existing_path.py" - dag = DAG( - dag_id, - is_paused_upon_creation=True, - ) + dag = DAG(dag_id, schedule=None, is_paused_upon_creation=True) dag.fileloc = dag_fileloc session = settings.Session() with mock.patch("airflow.models.dag.DagCode.bulk_sync_to_db"): @@ -1472,15 +1492,15 @@ def test_dag_naive_default_args_start_date_with_timezone(self): local_tz = pendulum.timezone("Europe/Zurich") default_args = {"start_date": datetime.datetime(2018, 1, 1, tzinfo=local_tz)} - dag = DAG("DAG", default_args=default_args) + dag = DAG("DAG", schedule=None, default_args=default_args) assert dag.timezone.name == local_tz.name - dag = DAG("DAG", default_args=default_args) + dag = DAG("DAG", schedule=None, default_args=default_args) assert dag.timezone.name == local_tz.name def test_roots(self): """Verify if dag.roots returns the root tasks of a DAG.""" - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: op1 = EmptyOperator(task_id="t1") op2 = EmptyOperator(task_id="t2") op3 = EmptyOperator(task_id="t3") @@ -1492,7 +1512,7 @@ def test_roots(self): def test_leaves(self): """Verify if dag.leaves returns the leaf tasks of a DAG.""" - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: op1 = EmptyOperator(task_id="t1") op2 = EmptyOperator(task_id="t2") op3 = EmptyOperator(task_id="t3") @@ -1504,7 +1524,7 @@ def test_leaves(self): def test_tree_view(self): """Verify correctness of dag.tree_view().""" - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: op1_a = EmptyOperator(task_id="t1_a") op1_b = EmptyOperator(task_id="t1_b") op2 = EmptyOperator(task_id="t2") @@ -1532,7 +1552,7 @@ def test_tree_view(self): def test_duplicate_task_ids_not_allowed_with_dag_context_manager(self): """Verify tasks with Duplicate task_id raises error""" - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: op1 = EmptyOperator(task_id="t1") with 
pytest.raises(DuplicateTaskIdFound, match="Task id 't1' has already been added to the DAG"): BashOperator(task_id="t1", bash_command="sleep 1") @@ -1541,7 +1561,7 @@ def test_duplicate_task_ids_not_allowed_with_dag_context_manager(self): def test_duplicate_task_ids_not_allowed_without_dag_context_manager(self): """Verify tasks with Duplicate task_id raises error""" - dag = DAG("test_dag", start_date=DEFAULT_DATE) + dag = DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) op1 = EmptyOperator(task_id="t1", dag=dag) with pytest.raises(DuplicateTaskIdFound, match="Task id 't1' has already been added to the DAG"): EmptyOperator(task_id="t1", dag=dag) @@ -1550,7 +1570,7 @@ def test_duplicate_task_ids_not_allowed_without_dag_context_manager(self): def test_duplicate_task_ids_for_same_task_is_allowed(self): """Verify that same tasks with Duplicate task_id do not raise error""" - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: op1 = op2 = EmptyOperator(task_id="t1") op3 = EmptyOperator(task_id="t3") op1 >> op3 @@ -1561,7 +1581,7 @@ def test_duplicate_task_ids_for_same_task_is_allowed(self): assert dag.task_dict == {op2.task_id: op2, op3.task_id: op3} def test_partial_subset_updates_all_references_while_deepcopy(self): - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: op1 = EmptyOperator(task_id="t1") op2 = EmptyOperator(task_id="t2") op3 = EmptyOperator(task_id="t3") @@ -1575,7 +1595,7 @@ def test_partial_subset_updates_all_references_while_deepcopy(self): assert "t3" not in partial.task_group.used_group_ids def test_partial_subset_taskgroup_join_ids(self): - with DAG("test_dag", start_date=DEFAULT_DATE) as dag: + with DAG("test_dag", schedule=None, start_date=DEFAULT_DATE) as dag: start = EmptyOperator(task_id="start") with TaskGroup(group_id="outer", prefix_group_id=False) as outer_group: with TaskGroup(group_id="tg1", prefix_group_id=False) as tg1: @@ -1605,7 +1625,7 @@ def test_schedule_dag_no_previous_runs(self): Tests scheduling a dag with no previous runs """ dag_id = "test_schedule_dag_no_previous_runs" - dag = DAG(dag_id=dag_id) + dag = DAG(dag_id=dag_id, schedule=None) dag.add_task(BaseOperator(task_id="faketastic", owner="Also fake", start_date=TEST_DATE)) dag_run = dag.create_dagrun( @@ -1636,6 +1656,7 @@ def test_dag_handle_callback_crash(self, mock_stats): mock_callback_with_exception.side_effect = Exception dag = DAG( dag_id=dag_id, + schedule=None, # callback with invalid signature should not cause crashes on_success_callback=lambda: 1, on_failure_callback=mock_callback_with_exception, @@ -1668,6 +1689,7 @@ def test_dag_handle_callback_with_removed_task(self, dag_maker, session): mock_callback = mock.MagicMock() with DAG( dag_id=dag_id, + schedule=None, on_success_callback=mock_callback, on_failure_callback=mock_callback, ) as dag: @@ -1779,7 +1801,7 @@ def test_fractional_seconds(self): def test_pickling(self): test_dag_id = "test_pickling" args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(test_dag_id, default_args=args) + dag = DAG(test_dag_id, schedule=None, default_args=args) dag_pickle = dag.pickle() assert dag_pickle.pickle.dag_id == dag.dag_id @@ -1790,15 +1812,15 @@ class DAGsubclass(DAG): pass args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(test_dag_id, default_args=args) + dag = DAG(test_dag_id, schedule=None, default_args=args) - dag_eq = DAG(test_dag_id, default_args=args) + 
dag_eq = DAG(test_dag_id, schedule=None, default_args=args) - dag_diff_load_time = DAG(test_dag_id, default_args=args) - dag_diff_name = DAG(test_dag_id + "_neq", default_args=args) + dag_diff_load_time = DAG(test_dag_id, schedule=None, default_args=args) + dag_diff_name = DAG(test_dag_id + "_neq", schedule=None, default_args=args) - dag_subclass = DAGsubclass(test_dag_id, default_args=args) - dag_subclass_diff_name = DAGsubclass(test_dag_id + "2", default_args=args) + dag_subclass = DAGsubclass(test_dag_id, schedule=None, default_args=args) + dag_subclass_diff_name = DAGsubclass(test_dag_id + "2", schedule=None, default_args=args) for dag_ in [dag_eq, dag_diff_name, dag_subclass, dag_subclass_diff_name]: dag_.last_loaded = dag.last_loaded @@ -1834,7 +1856,7 @@ class DAGsubclass(DAG): def test_get_paused_dag_ids(self): dag_id = "test_get_paused_dag_ids" - dag = DAG(dag_id, is_paused_upon_creation=True) + dag = DAG(dag_id, schedule=None, is_paused_upon_creation=True) dag.sync_to_db() assert DagModel.get_dagmodel(dag_id) is not None @@ -1921,7 +1943,7 @@ def test_description_from_timetable(self, timetable, expected_description): assert dag.timetable.description == expected_description def test_create_dagrun_run_id_is_generated(self): - dag = DAG(dag_id="run_id_is_generated") + dag = DAG(dag_id="run_id_is_generated", schedule=None) dr = dag.create_dagrun( run_type=DagRunType.MANUAL, execution_date=DEFAULT_DATE, @@ -1931,7 +1953,7 @@ def test_create_dagrun_run_id_is_generated(self): assert dr.run_id == f"manual__{DEFAULT_DATE.isoformat()}" def test_create_dagrun_run_type_is_obtained_from_run_id(self): - dag = DAG(dag_id="run_type_is_obtained_from_run_id") + dag = DAG(dag_id="run_type_is_obtained_from_run_id", schedule=None) dr = dag.create_dagrun(run_id="scheduled__", state=State.NONE) assert dr.run_type == DagRunType.SCHEDULED @@ -1940,7 +1962,7 @@ def test_create_dagrun_run_type_is_obtained_from_run_id(self): def test_create_dagrun_job_id_is_set(self): job_id = 42 - dag = DAG(dag_id="test_create_dagrun_job_id_is_set") + dag = DAG(dag_id="test_create_dagrun_job_id_is_set", schedule=None) dr = dag.create_dagrun( run_id="test_create_dagrun_job_id_is_set", state=State.NONE, creating_job_id=job_id ) @@ -1955,7 +1977,10 @@ def test_dag_add_task_checks_trigger_rule(self): task_id="task_with_non_default_trigger_rule", trigger_rule=TriggerRule.ALWAYS ) non_fail_stop_dag = DAG( - dag_id="test_dag_add_task_checks_trigger_rule", start_date=DEFAULT_DATE, fail_stop=False + dag_id="test_dag_add_task_checks_trigger_rule", + schedule=None, + start_date=DEFAULT_DATE, + fail_stop=False, ) non_fail_stop_dag.add_task(task_with_non_default_trigger_rule) @@ -1963,7 +1988,10 @@ def test_dag_add_task_checks_trigger_rule(self): from airflow.models.abstractoperator import DEFAULT_TRIGGER_RULE fail_stop_dag = DAG( - dag_id="test_dag_add_task_checks_trigger_rule", start_date=DEFAULT_DATE, fail_stop=True + dag_id="test_dag_add_task_checks_trigger_rule", + schedule=None, + start_date=DEFAULT_DATE, + fail_stop=True, ) task_with_default_trigger_rule = EmptyOperator( task_id="task_with_default_trigger_rule", trigger_rule=DEFAULT_TRIGGER_RULE @@ -1975,7 +2003,7 @@ def test_dag_add_task_checks_trigger_rule(self): fail_stop_dag.add_task(task_with_non_default_trigger_rule) def test_dag_add_task_sets_default_task_group(self): - dag = DAG(dag_id="test_dag_add_task_sets_default_task_group", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dag_add_task_sets_default_task_group", schedule=None, start_date=DEFAULT_DATE) 
task_without_task_group = EmptyOperator(task_id="task_without_group_id") default_task_group = TaskGroupContext.get_current_task_group(dag) dag.add_task(task_without_task_group) @@ -1992,7 +2020,7 @@ def test_clear_set_dagrun_state(self, dag_run_state): dag_id = "test_clear_set_dagrun_state" self._clean_up(dag_id) task_id = "t1" - dag = DAG(dag_id, start_date=DEFAULT_DATE, max_active_runs=1) + dag = DAG(dag_id, schedule=None, start_date=DEFAULT_DATE, max_active_runs=1) t_1 = EmptyOperator(task_id=task_id, dag=dag) session = settings.Session() @@ -2016,15 +2044,7 @@ def test_clear_set_dagrun_state(self, dag_run_state): session=session, ) - dagruns = ( - session.query( - DagRun, - ) - .filter( - DagRun.dag_id == dag_id, - ) - .all() - ) + dagruns = session.query(DagRun).filter(DagRun.dag_id == dag_id).all() assert len(dagruns) == 1 dagrun: DagRun = dagruns[0] @@ -2036,7 +2056,7 @@ def test_clear_set_dagrun_state_for_mapped_task(self, dag_run_state): self._clean_up(dag_id) task_id = "t1" - dag = DAG(dag_id, start_date=DEFAULT_DATE, max_active_runs=1) + dag = DAG(dag_id, schedule=None, start_date=DEFAULT_DATE, max_active_runs=1) @dag.task def make_arg_lists(): @@ -2081,22 +2101,14 @@ def consumer(value): assert upstream_ti.state is None # cleared assert ti.state is None # cleared assert ti2.state == State.SUCCESS # not cleared - dagruns = ( - session.query( - DagRun, - ) - .filter( - DagRun.dag_id == dag_id, - ) - .all() - ) + dagruns = session.query(DagRun).filter(DagRun.dag_id == dag_id).all() assert len(dagruns) == 1 dagrun: DagRun = dagruns[0] assert dagrun.state == dag_run_state def test_dag_test_basic(self): - dag = DAG(dag_id="test_local_testing_conn_file", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_local_testing_conn_file", schedule=None, start_date=DEFAULT_DATE) mock_object = mock.MagicMock() @task_decorator @@ -2111,7 +2123,7 @@ def check_task(): mock_object.assert_called_once() def test_dag_test_with_dependencies(self): - dag = DAG(dag_id="test_local_testing_conn_file", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_local_testing_conn_file", schedule=None, start_date=DEFAULT_DATE) mock_object = mock.MagicMock() @task_decorator @@ -2146,6 +2158,7 @@ def handle_dag_failure(context): default_args={"on_failure_callback": handle_task_failure}, on_failure_callback=handle_dag_failure, start_date=DEFAULT_DATE, + schedule=None, ) mock_task_object_1 = mock.MagicMock() @@ -2172,7 +2185,7 @@ def check_task_2(my_input): mock_task_object_2.assert_not_called() def test_dag_test_with_task_mapping(self): - dag = DAG(dag_id="test_local_testing_conn_file", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_local_testing_conn_file", schedule=None, start_date=DEFAULT_DATE) mock_object = mock.MagicMock() @task_decorator() @@ -2198,7 +2211,7 @@ def test_dag_connection_file(self, tmp_path): - conn_id: my_postgres_conn conn_type: postgres """ - dag = DAG(dag_id="test_local_testing_conn_file", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_local_testing_conn_file", schedule=None, start_date=DEFAULT_DATE) @task_decorator def check_task(): @@ -2230,7 +2243,7 @@ def test_clear_dag( dag_id = "test_clear_dag" self._clean_up(dag_id) task_id = "t1" - dag = DAG(dag_id, start_date=DEFAULT_DATE, max_active_runs=1) + dag = DAG(dag_id, schedule=None, start_date=DEFAULT_DATE, max_active_runs=1) t_1 = EmptyOperator(task_id=task_id, dag=dag) session = settings.Session() # type: ignore @@ -2254,15 +2267,7 @@ def test_clear_dag( session=session, ) - task_instances = ( - session.query( - TI, - ) - .filter( - 
TI.dag_id == dag_id, - ) - .all() - ) + task_instances = session.query(TI).filter(TI.dag_id == dag_id).all() assert len(task_instances) == 1 task_instance: TI = task_instances[0] @@ -2521,7 +2526,7 @@ def test_replace_outdated_access_control_actions(self): } with pytest.warns(DeprecationWarning) as deprecation_warnings: - dag = DAG(dag_id="dag_with_outdated_perms", access_control=outdated_permissions) + dag = DAG(dag_id="dag_with_outdated_perms", schedule=None, access_control=outdated_permissions) assert dag.access_control == updated_permissions assert len(deprecation_warnings) == 2 assert "permission is deprecated" in str(deprecation_warnings[0].message) @@ -2535,11 +2540,7 @@ def test_replace_outdated_access_control_actions(self): assert "permission is deprecated" in str(deprecation_warnings[1].message) def test_validate_executor_field_executor_not_configured(self): - dag = DAG( - "test-dag", - schedule=None, - ) - + dag = DAG("test-dag", schedule=None) EmptyOperator(task_id="t1", dag=dag, executor="test.custom.executor") with pytest.raises( UnknownExecutorException, @@ -2549,11 +2550,7 @@ def test_validate_executor_field(self): with patch.object(ExecutorLoader, "lookup_executor_name_by_str"): - dag = DAG( - "test-dag", - schedule=None, - ) - + dag = DAG("test-dag", schedule=None) EmptyOperator(task_id="t1", dag=dag, executor="test.custom.executor") dag.validate() @@ -2591,6 +2588,7 @@ def test_validate_params_on_trigger_dag(self): def test_dag_owner_links(self): dag = DAG( "dag", + schedule=None, start_date=DEFAULT_DATE, owner_links={"owner1": "https://mylink.com", "owner2": "mailto:someone@yoursite.com"}, ) @@ -2604,10 +2602,7 @@ def test_dag_owner_links(self): assert orm_dag_owners == expected_owners # Test dag owner links are removed completely - dag = DAG( - "dag", - start_date=DEFAULT_DATE, - ) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE) dag.sync_to_db(session=session) orm_dag_owners = session.query(DagOwnerAttributes).all() @@ -2615,7 +2610,7 @@ # Check wrongly formatted owner link with pytest.raises(AirflowException): - DAG("dag", start_date=DEFAULT_DATE, owner_links={"owner1": "my-bad-link"}) + DAG("dag", schedule=None, start_date=DEFAULT_DATE, owner_links={"owner1": "my-bad-link"}) @pytest.mark.parametrize( "kwargs", [ @@ -2661,7 +2656,7 @@ def teardown_method(self): self._clean() def test_dags_needing_dagruns_not_too_early(self): - dag = DAG(dag_id="far_future_dag", start_date=timezone.datetime(2038, 1, 1)) + dag = DAG(dag_id="far_future_dag", schedule=None, start_date=timezone.datetime(2038, 1, 1)) EmptyOperator(task_id="dummy", dag=dag, owner="airflow") session = settings.Session() @@ -2774,7 +2769,11 @@ def test_dags_needing_dagruns_dataset_aliases(self, dag_maker, session): assert dag_models == [dag_model] def test_max_active_runs_not_none(self): - dag = DAG(dag_id="test_max_active_runs_not_none", start_date=timezone.datetime(2038, 1, 1)) + dag = DAG( + dag_id="test_max_active_runs_not_none", + schedule=None, + start_date=timezone.datetime(2038, 1, 1), + ) EmptyOperator(task_id="dummy", dag=dag, owner="airflow") session = settings.Session() @@ -2798,7 +2797,7 @@ def test_dags_needing_dagruns_only_unpaused(self): """ We should never create dagruns for paused DAGs """ - dag = DAG(dag_id="test_dags", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dags", schedule=None, start_date=DEFAULT_DATE) EmptyOperator(task_id="dummy", dag=dag, owner="airflow") session =
settings.Session() @@ -2831,7 +2830,7 @@ def test_dags_needing_dagruns_doesnot_send_dagmodel_with_import_errors(self, ses We check that has_import_error is false for dags being sent to the scheduler to create dagruns """ - dag = DAG(dag_id="test_dags", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dags", schedule=None, start_date=DEFAULT_DATE) EmptyOperator(task_id="dummy", dag=dag, owner="airflow") orm_dag = DagModel( @@ -2863,7 +2862,7 @@ def test_dags_needing_dagruns_doesnot_send_dagmodel_with_import_errors(self, ses ], ) def test_relative_fileloc(self, fileloc, expected_relative): - dag = DAG(dag_id="test") + dag = DAG(dag_id="test", schedule=None) dag.fileloc = fileloc assert dag.relative_fileloc == expected_relative @@ -2890,7 +2889,7 @@ def test_relative_fileloc_serialized( serializer process. When the full path is not relative to the configured dags folder, then relative fileloc should just be the full path. """ - dag = DAG(dag_id="test") + dag = DAG(dag_id="test", schedule=None) dag.fileloc = fileloc sdm = SerializedDagModel(dag) session.add(sdm) @@ -2903,7 +2902,7 @@ def test_relative_fileloc_serialized( def test__processor_dags_folder(self, session): """Only populated after deserialization""" - dag = DAG(dag_id="test") + dag = DAG(dag_id="test", schedule=None) dag.fileloc = "/abc/test.py" assert dag._processor_dags_folder is None sdm = SerializedDagModel(dag) @@ -2981,7 +2980,7 @@ def test_dataset_expression(self, session: Session) -> None: @mock.patch("airflow.models.dag.run_job") def test_dag_executors(self, run_job_mock): - dag = DAG(dag_id="test") + dag = DAG(dag_id="test", schedule=None) reload(executor_loader) with conf_vars({("core", "executor"): "SequentialExecutor"}): dag.run() @@ -3000,7 +2999,7 @@ def teardown_method(self) -> None: @pytest.mark.parametrize("tasks_count", [3, 12]) def test_count_number_queries(self, tasks_count): - dag = DAG("test_dagrun_query_count", start_date=DEFAULT_DATE) + dag = DAG("test_dagrun_query_count", schedule=None, start_date=DEFAULT_DATE) for i in range(tasks_count): EmptyOperator(task_id=f"dummy_task_{i}", owner="test", dag=dag) with assert_queries_count(3): @@ -3030,7 +3029,7 @@ def teardown_method(self): clear_db_runs() def test_fileloc(self): - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def noop_pipeline(): ... dag = noop_pipeline() @@ -3041,7 +3040,7 @@ def noop_pipeline(): ... def test_set_dag_id(self): """Test that you can set dag_id from the decorator.""" - @dag_decorator("test", default_args=self.DEFAULT_ARGS) + @dag_decorator("test", schedule=None, default_args=self.DEFAULT_ARGS) def noop_pipeline(): ... dag = noop_pipeline() @@ -3051,7 +3050,7 @@ def noop_pipeline(): ... def test_default_dag_id(self): """Test that @dag uses function name as default dag id.""" - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def noop_pipeline(): ... dag = noop_pipeline() @@ -3068,7 +3067,7 @@ def noop_pipeline(): ...
def test_documentation_added(self, dag_doc_md, expected_doc_md): """Test that @dag uses function docs as doc_md for DAG object if doc_md is not explicitly set.""" - @dag_decorator(default_args=self.DEFAULT_ARGS, doc_md=dag_doc_md) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS, doc_md=dag_doc_md) def noop_pipeline(): """Regular DAG documentation""" @@ -3080,7 +3079,7 @@ def noop_pipeline(): def test_documentation_template_rendered(self): """Test that @dag uses function docs as doc_md for DAG object""" - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def noop_pipeline(): """ {% if True %} @@ -3105,7 +3104,7 @@ def test_resolve_documentation_template_file_not_rendered(self, tmp_path): path = tmp_path / "testfile.md" path.write_text(raw_content) - @dag_decorator("test-dag", start_date=DEFAULT_DATE, doc_md=str(path)) + @dag_decorator("test-dag", schedule=None, start_date=DEFAULT_DATE, doc_md=str(path)) def markdown_docs(): ... dag = markdown_docs() @@ -3116,7 +3115,7 @@ def markdown_docs(): ... def test_fails_if_arg_not_set(self): """Test that @dag decorated function fails if positional argument is not set""" - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def noop_pipeline(value): @task_decorator def return_num(num): @@ -3131,7 +3130,7 @@ def return_num(num): def test_dag_param_resolves(self): """Test that dag param is correctly resolved by operator""" - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def xcom_pass_to_op(value=self.VALUE): @task_decorator def return_num(num): @@ -3157,7 +3156,7 @@ def return_num(num): def test_dag_param_dagrun_parameterized(self): """Test that dag param is correctly overwritten when set in dag run""" - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def xcom_pass_to_op(value=self.VALUE): @task_decorator def return_num(num): @@ -3187,7 +3186,7 @@ def return_num(num): def test_set_params_for_dag(self, value): """Test that dag param is correctly set when using dag decorator""" - @dag_decorator(default_args=self.DEFAULT_ARGS) + @dag_decorator(schedule=None, default_args=self.DEFAULT_ARGS) def xcom_pass_to_op(value=value): @task_decorator def return_num(num): @@ -3234,7 +3233,7 @@ def test_dag_schedule_interval_change_after_init(schedule_interval): @pytest.mark.parametrize("timetable", [NullTimetable(), OnceTimetable()]) def test_dag_timetable_change_after_init(timetable): - dag = DAG("my-dag") # Default is timedelta(days=1). 
+ dag = DAG("my-dag", schedule=timedelta(days=1), start_date=DEFAULT_DATE) dag.timetable = timetable assert not dag._check_schedule_interval_matches_timetable() @@ -3634,7 +3633,12 @@ def test_get_next_data_interval( ], ) def test__time_restriction(dag_maker, dag_date, tasks_date, restrict): - with dag_maker("test__time_restriction", start_date=dag_date[0], end_date=dag_date[1]) as dag: + with dag_maker( + "test__time_restriction", + schedule=None, + start_date=dag_date[0], + end_date=dag_date[1], + ) as dag: EmptyOperator(task_id="do1", start_date=tasks_date[0][0], end_date=tasks_date[0][1]) EmptyOperator(task_id="do2", start_date=tasks_date[1][0], end_date=tasks_date[1][1]) @@ -3653,10 +3657,10 @@ def test__time_restriction(dag_maker, dag_date, tasks_date, restrict): ) def test__tags_length(tags: list[str], should_pass: bool): if should_pass: - DAG("test-dag", tags=tags) + DAG("test-dag", schedule=None, tags=tags) else: with pytest.raises(AirflowException): - DAG("test-dag", tags=tags) + DAG("test-dag", schedule=None, tags=tags) @pytest.mark.need_serialized_dag @@ -3748,12 +3752,12 @@ def test_create_dagrun_disallow_manual_to_use_automated_run_id(run_id_type: DagR def test_invalid_type_for_args(): with pytest.raises(TypeError): - DAG("invalid-default-args", max_consecutive_failed_dag_runs="not_an_int") + DAG("invalid-default-args", schedule=None, max_consecutive_failed_dag_runs="not_an_int") @mock.patch("airflow.models.dag.validate_instance_args") def test_dag_init_validates_arg_types(mock_validate_instance_args): - dag = DAG("dag_with_expected_args") + dag = DAG("dag_with_expected_args", schedule=None) mock_validate_instance_args.assert_called_once_with(dag, DAG_ARGS_EXPECTED_TYPES) @@ -3844,7 +3848,7 @@ def cleared_neither(task): ) def test_get_flat_relative_ids_with_setup(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, w1, w2, w3, w4, t1 = self.make_tasks(dag, "s1, w1, w2, w3, w4, t1") s1 >> w1 >> w2 >> w3 @@ -3889,7 +3893,7 @@ def test_get_flat_relative_ids_with_setup(self): def test_get_flat_relative_ids_with_setup_nested_ctx_mgr(self): """Let's test some gnarlier cases here""" - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2 = self.make_tasks(dag, "s1, t1, s2, t2") with s1 >> t1: BaseOperator(task_id="w1") @@ -3900,7 +3904,7 @@ def test_get_flat_relative_ids_with_setup_nested_ctx_mgr(self): def test_get_flat_relative_ids_with_setup_nested_no_ctx_mgr(self): """Let's test some gnarlier cases here""" - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2, w3 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2, w3") s1 >> t1 s1 >> w1 >> t1 @@ -3925,7 +3929,7 @@ def test_get_flat_relative_ids_with_setup_nested_no_ctx_mgr(self): assert self.cleared_downstream(w3) == {s2, w3, t2} def test_get_flat_relative_ids_follows_teardowns(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, w1, w2, t1 = self.make_tasks(dag, "s1, w1, w2, t1") s1 >> w1 >> [w2, t1] s1 >> t1 @@ -3943,7 +3947,7 @@ def test_get_flat_relative_ids_follows_teardowns(self): assert self.cleared_downstream(w1) == {s1, w1, w2, t1, s2} def test_get_flat_relative_ids_two_tasks_diff_setup_teardowns(self): - 
with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2") s1 >> w1 >> [w2, t1] s1 >> t1 @@ -3958,7 +3962,7 @@ def test_get_flat_relative_ids_two_tasks_diff_setup_teardowns(self): assert self.cleared_downstream(w2) == {s2, w2, t2} def test_get_flat_relative_ids_one_task_multiple_setup_teardowns(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1a, s1b, t1, s2, t2, s3, t3a, t3b, w1, w2 = self.make_tasks( dag, "s1a, s1b, t1, s2, t2, s3, t3a, t3b, w1, w2" ) @@ -3985,7 +3989,7 @@ def test_get_flat_relative_ids_with_setup_and_groups(self): When we do tg >> dag_teardown, teardowns should be excluded from tg leaves. """ - dag = DAG(dag_id="test_dag", start_date=pendulum.now()) + dag = DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) with dag: dag_setup = BaseOperator(task_id="dag_setup").as_setup() dag_teardown = BaseOperator(task_id="dag_teardown").as_teardown() @@ -4053,7 +4057,7 @@ def test_clear_upstream_not_your_setup(self): before / while w2 runs. It just gets cleared by virtue of it being upstream, and that's what you requested. And its teardown gets cleared too. But w1 doesn't. """ - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, w1, w2, t1 = self.make_tasks(dag, "s1, w1, w2, t1") s1 >> w1 >> t1.as_teardown(setups=s1) s1 >> w2 @@ -4062,7 +4066,7 @@ def test_clear_upstream_not_your_setup(self): assert self.cleared_upstream(w2) == {s1, w2, t1} def test_clearing_teardown_no_clear_setup(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, w1, t1 = self.make_tasks(dag, "s1, w1, t1") s1 >> t1 # clearing t1 does not clear s1 @@ -4074,7 +4078,7 @@ def test_clearing_teardown_no_clear_setup(self): assert self.cleared_downstream(w1) == {s1, w1, t1} def test_clearing_setup_clears_teardown(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, w1, t1 = self.make_tasks(dag, "s1, w1, t1") s1 >> t1 s1 >> w1 >> t1 @@ -4097,7 +4101,7 @@ def test_clearing_setup_clears_teardown(self): ], ) def test_clearing_setup_clears_teardown_taskflow(self, upstream, downstream, expected): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: @setup def my_setup(): ... @@ -4121,7 +4125,7 @@ def my_teardown(): ... 
} == expected def test_get_flat_relative_ids_two_tasks_diff_setup_teardowns_deeper(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2, s3, w3, t3 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2, s3, w3, t3") s1 >> w1 >> t1 s1 >> t1 @@ -4147,7 +4151,7 @@ def test_get_flat_relative_ids_two_tasks_diff_setup_teardowns_deeper(self): assert self.cleared_downstream(w1) == {s1, w1, t1, s2, w2, t2, t3} def test_clearing_behavior_multiple_setups_for_work_task(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2, s3, w3, t3 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2, s3, w3, t3") s1 >> t1 s2 >> t2 @@ -4166,7 +4170,7 @@ def test_clearing_behavior_multiple_setups_for_work_task(self): assert self.cleared_neither(s2) == {s2, t2} def test_clearing_behavior_multiple_setups_for_work_task2(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2, s3, w3, t3 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2, s3, w3, t3") s1 >> t1 s2 >> t2 @@ -4177,7 +4181,7 @@ def test_clearing_behavior_multiple_setups_for_work_task2(self): assert self.cleared_downstream(w2) == {s1, s2, s3, w2, t1, t2, t3} def test_clearing_behavior_more_tertiary_weirdness(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2, s3, t3 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2, s3, t3") s1 >> t1 s2 >> t2 @@ -4208,7 +4212,7 @@ def sort(task_list): assert set(w2.get_upstreams_only_setups_and_teardowns()) == {s2, t2, s1, t1, t3} def test_clearing_behavior_more_tertiary_weirdness2(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1, s2, t2, w1, w2, s3, t3 = self.make_tasks(dag, "s1, t1, s2, t2, w1, w2, s3, t3") s1 >> t1 s2 >> t2 @@ -4237,7 +4241,7 @@ def sort(task_list): assert self.cleared_upstream(t1) == {s1, t1, s2, t2, w1} def test_clearing_behavior_just_teardown(self): - with DAG(dag_id="test_dag", start_date=pendulum.now()) as dag: + with DAG(dag_id="test_dag", schedule=None, start_date=pendulum.now()) as dag: s1, t1 = self.make_tasks(dag, "s1, t1") s1 >> t1 assert set(t1.get_upstreams_only_setups_and_teardowns()) == set() diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index 5ace985adde5d..60b33c06fc584 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -161,7 +161,7 @@ def test_process_file_duplicated_dag_id(self, tmp_path): def create_dag(): from airflow.decorators import dag - @dag(default_args={"owner": "owner1"}) + @dag(schedule=None, default_args={"owner": "owner1"}) def my_flow(): pass @@ -520,7 +520,7 @@ def basic_cycle(): dag_name = "cycle_dag" default_args = {"owner": "owner1", "start_date": datetime.datetime(2016, 1, 1)} - dag = DAG(dag_name, default_args=default_args) + dag = DAG(dag_name, schedule=timedelta(days=1), default_args=default_args) # A -> A with dag: diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index 6d0ef33bc5dca..0117103dbbb6c 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -124,7 +124,7 @@ def create_dag_run( def 
test_clear_task_instances_for_backfill_unfinished_dagrun(self, state, session): now = timezone.utcnow() dag_id = "test_clear_task_instances_for_backfill_dagrun" - dag = DAG(dag_id=dag_id, start_date=now) + dag = DAG(dag_id=dag_id, schedule=datetime.timedelta(days=1), start_date=now) dag_run = self.create_dag_run(dag, execution_date=now, is_backfill=True, state=state, session=session) task0 = EmptyOperator(task_id="backfill_task_0", owner="test", dag=dag) @@ -143,7 +143,7 @@ def test_clear_task_instances_for_backfill_finished_dagrun(self, state, session): now = timezone.utcnow() dag_id = "test_clear_task_instances_for_backfill_dagrun" - dag = DAG(dag_id=dag_id, start_date=now) + dag = DAG(dag_id=dag_id, schedule=datetime.timedelta(days=1), start_date=now) dag_run = self.create_dag_run(dag, execution_date=now, is_backfill=True, state=state, session=session) task0 = EmptyOperator(task_id="backfill_task_0", owner="test", dag=dag) @@ -222,7 +222,11 @@ def test_dagrun_success_when_all_skipped(self, session): """ Tests that a DAG run succeeds when all tasks are skipped """ - dag = DAG(dag_id="test_dagrun_success_when_all_skipped", start_date=timezone.datetime(2017, 1, 1)) + dag = DAG( + dag_id="test_dagrun_success_when_all_skipped", + schedule=datetime.timedelta(days=1), + start_date=timezone.datetime(2017, 1, 1), + ) dag_task1 = ShortCircuitOperator( task_id="test_short_circuit_false", dag=dag, python_callable=lambda: False ) @@ -245,7 +249,11 @@ def test_dagrun_not_stuck_in_running_when_all_tasks_instances_are_removed(self, """ Tests that a DAG run is not stuck in the running state when all task instances are removed """ - dag = DAG(dag_id="test_dagrun_success_when_all_skipped", start_date=timezone.datetime(2017, 1, 1)) + dag = DAG( + dag_id="test_dagrun_success_when_all_skipped", + schedule=datetime.timedelta(days=1), + start_date=timezone.datetime(2017, 1, 1), + ) dag_task1 = ShortCircuitOperator( task_id="test_short_circuit_false", dag=dag, python_callable=lambda: False ) @@ -265,7 +273,12 @@ def test_dagrun_not_stuck_in_running_when_all_tasks_instances_are_removed(self, assert DagRunState.SUCCESS == dag_run.state def test_dagrun_success_conditions(self, session): - dag = DAG("test_dagrun_success_conditions", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG( + "test_dagrun_success_conditions", + schedule=datetime.timedelta(days=1), + start_date=DEFAULT_DATE, + default_args={"owner": "owner1"}, + ) # A -> B # A -> C -> D @@ -309,7 +322,12 @@ def test_dagrun_success_conditions(self, session): assert DagRunState.SUCCESS == dr.state def test_dagrun_deadlock(self, session): - dag = DAG("text_dagrun_deadlock", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG( + "text_dagrun_deadlock", + schedule=datetime.timedelta(days=1), + start_date=DEFAULT_DATE, + default_args={"owner": "owner1"}, + ) with dag: op1 = EmptyOperator(task_id="A") @@ -342,7 +360,7 @@ def test_dagrun_deadlock(self, session): assert dr.state == DagRunState.FAILED def test_dagrun_no_deadlock_with_restarting(self, session): - dag = DAG("test_dagrun_no_deadlock_with_restarting", start_date=DEFAULT_DATE) + dag = DAG( + "test_dagrun_no_deadlock_with_restarting", + schedule=datetime.timedelta(days=1), + start_date=DEFAULT_DATE, + ) with dag: op1 = EmptyOperator(task_id="upstream_task") op2 = EmptyOperator(task_id="downstream_task") @@ -362,7 +384,7 @@ def test_dagrun_no_deadlock_with_restarting(self, session): assert dr.state == DagRunState.RUNNING
def test_dagrun_no_deadlock_with_depends_on_past(self, session): - dag = DAG("test_dagrun_no_deadlock", start_date=DEFAULT_DATE) + dag = DAG("test_dagrun_no_deadlock", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) with dag: EmptyOperator(task_id="dop", depends_on_past=True) EmptyOperator(task_id="tc", max_active_tis_per_dag=1) @@ -405,6 +427,7 @@ def on_success_callable(context): dag = DAG( dag_id="test_dagrun_success_callback", + schedule=datetime.timedelta(days=1), start_date=datetime.datetime(2017, 1, 1), on_success_callback=on_success_callable, ) @@ -432,6 +455,7 @@ def on_failure_callable(context): dag = DAG( dag_id="test_dagrun_failure_callback", + schedule=datetime.timedelta(days=1), start_date=datetime.datetime(2017, 1, 1), on_failure_callback=on_failure_callable, ) @@ -459,6 +483,7 @@ def on_success_callable(context): dag = DAG( dag_id="test_dagrun_update_state_with_handle_callback_success", + schedule=datetime.timedelta(days=1), start_date=datetime.datetime(2017, 1, 1), on_success_callback=on_success_callable, ) @@ -497,6 +522,7 @@ def on_failure_callable(context): dag = DAG( dag_id="test_dagrun_update_state_with_handle_callback_failure", + schedule=datetime.timedelta(days=1), start_date=datetime.datetime(2017, 1, 1), on_failure_callback=on_failure_callable, ) @@ -530,7 +556,12 @@ def on_failure_callable(context): ) def test_dagrun_set_state_end_date(self, session): - dag = DAG("test_dagrun_set_state_end_date", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG( + "test_dagrun_set_state_end_date", + schedule=datetime.timedelta(days=1), + start_date=DEFAULT_DATE, + default_args={"owner": "owner1"}, + ) dag.clear() @@ -576,7 +607,10 @@ def test_dagrun_set_state_end_date(self, session): def test_dagrun_update_state_end_date(self, session): dag = DAG( - "test_dagrun_update_state_end_date", start_date=DEFAULT_DATE, default_args={"owner": "owner1"} + "test_dagrun_update_state_end_date", + schedule=datetime.timedelta(days=1), + start_date=DEFAULT_DATE, + default_args={"owner": "owner1"}, ) # A -> B @@ -637,7 +671,11 @@ def test_get_task_instance_on_empty_dagrun(self, session): """ Make sure that a proper value is returned when a dagrun has no task instances """ - dag = DAG(dag_id="test_get_task_instance_on_empty_dagrun", start_date=timezone.datetime(2017, 1, 1)) + dag = DAG( + dag_id="test_get_task_instance_on_empty_dagrun", + schedule=datetime.timedelta(days=1), + start_date=timezone.datetime(2017, 1, 1), + ) ShortCircuitOperator(task_id="test_short_circuit_false", dag=dag, python_callable=lambda: False) now = timezone.utcnow() @@ -660,7 +698,7 @@ def test_get_task_instance_on_empty_dagrun(self, session): assert ti is None def test_get_latest_runs(self, session): - dag = DAG(dag_id="test_latest_runs_1", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_latest_runs_1", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) self.create_dag_run(dag, execution_date=timezone.datetime(2015, 1, 1), session=session) self.create_dag_run(dag, execution_date=timezone.datetime(2015, 1, 2), session=session) dagruns = DagRun.get_latest_runs(session) @@ -671,9 +709,9 @@ def test_get_latest_runs(self, session): def test_removed_task_instances_can_be_restored(self, session): def with_all_tasks_removed(dag): - return DAG(dag_id=dag.dag_id, start_date=dag.start_date) + return DAG(dag_id=dag.dag_id, schedule=datetime.timedelta(days=1), start_date=dag.start_date) - dag = DAG("test_task_restoration", start_date=DEFAULT_DATE) + dag = DAG("test_task_restoration", 
schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) dag.add_task(EmptyOperator(task_id="flaky_task", owner="test")) dagrun = self.create_dag_run(dag, session=session) @@ -694,7 +732,7 @@ def with_all_tasks_removed(dag): assert flaky_ti.state is None def test_already_added_task_instances_can_be_ignored(self, session): - dag = DAG("triggered_dag", start_date=DEFAULT_DATE) + dag = DAG("triggered_dag", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) dag.add_task(EmptyOperator(task_id="first_task", owner="test")) dagrun = self.create_dag_run(dag, session=session) @@ -723,7 +761,11 @@ def mutate_task_instance(task_instance): mock_hook.side_effect = mutate_task_instance - dag = DAG("test_task_instance_mutation_hook", start_date=DEFAULT_DATE) + dag = DAG( + "test_task_instance_mutation_hook", + schedule=datetime.timedelta(days=1), + start_date=DEFAULT_DATE, + ) dag.add_task(EmptyOperator(task_id="task_to_mutate", owner="test", queue="queue1")) dagrun = self.create_dag_run(dag, session=session) @@ -822,7 +864,7 @@ def test_next_dagruns_to_examine_only_unpaused(self, session, state): and gets running/queued dagruns """ - dag = DAG(dag_id="test_dags", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dags", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) EmptyOperator(task_id="dummy", dag=dag, owner="airflow") orm_dag = DagModel( @@ -859,7 +901,7 @@ def test_no_scheduling_delay_for_nonscheduled_runs(self, stats_mock, session): Tests that dag scheduling delay stat is not called if the dagrun is not a scheduled run. This case is a manual run. Simple test for coherence check. """ - dag = DAG(dag_id="test_dagrun_stats", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dagrun_stats", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) dag_task = EmptyOperator(task_id="dummy", dag=dag) initial_task_states = { @@ -942,7 +984,7 @@ def test_states_sets(self, session): """ Tests that adding State.failed_states and State.success_states works as expected.
""" - dag = DAG(dag_id="test_dagrun_states", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dagrun_states", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) dag_task_success = EmptyOperator(task_id="dummy", dag=dag) dag_task_failed = EmptyOperator(task_id="dummy2", dag=dag) @@ -968,7 +1010,7 @@ def test_states_sets(self, session): def test_verify_integrity_task_start_and_end_date(Stats_incr, session, run_type, expected_tis): """Test that tasks with specific dates are only created for backfill runs""" - with DAG("test", start_date=DEFAULT_DATE) as dag: + with DAG("test", schedule=datetime.timedelta(days=1), start_date=DEFAULT_DATE) as dag: EmptyOperator(task_id="without") EmptyOperator(task_id="with_start_date", start_date=DEFAULT_DATE + datetime.timedelta(1)) EmptyOperator(task_id="with_end_date", end_date=DEFAULT_DATE - datetime.timedelta(1)) diff --git a/tests/models/test_mappedoperator.py b/tests/models/test_mappedoperator.py index 2ee597879064c..2b0cd50165c45 100644 --- a/tests/models/test_mappedoperator.py +++ b/tests/models/test_mappedoperator.py @@ -52,7 +52,7 @@ def test_task_mapping_with_dag(): - with DAG("test-dag", start_date=DEFAULT_DATE) as dag: + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE) as dag: task1 = BaseOperator(task_id="op1") literal = ["a", "b", "c"] mapped = MockOperator.partial(task_id="task_2").expand(arg2=literal) @@ -87,7 +87,7 @@ def __init__(self, arg, **kwargs): def execute(self, context: Context): pass - with DAG("test-dag", start_date=DEFAULT_DATE) as dag: + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE) as dag: task1 = CustomOperator(task_id="op1", arg=None) unrenderable_values = [UnrenderableClass(), UnrenderableClass()] mapped = CustomOperator.partial(task_id="task_2").expand(arg=unrenderable_values) @@ -101,7 +101,7 @@ def execute(self, context: Context): def test_task_mapping_without_dag_context(): - with DAG("test-dag", start_date=DEFAULT_DATE) as dag: + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE) as dag: task1 = BaseOperator(task_id="op1") literal = ["a", "b", "c"] mapped = MockOperator.partial(task_id="task_2").expand(arg2=literal) @@ -118,7 +118,7 @@ def test_task_mapping_without_dag_context(): def test_task_mapping_default_args(): default_args = {"start_date": DEFAULT_DATE.now(), "owner": "test"} - with DAG("test-dag", start_date=DEFAULT_DATE, default_args=default_args): + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE, default_args=default_args): task1 = BaseOperator(task_id="op1") literal = ["a", "b", "c"] mapped = MockOperator.partial(task_id="task_2").expand(arg2=literal) @@ -131,7 +131,7 @@ def test_task_mapping_default_args(): def test_task_mapping_override_default_args(): default_args = {"retries": 2, "start_date": DEFAULT_DATE.now()} - with DAG("test-dag", start_date=DEFAULT_DATE, default_args=default_args): + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE, default_args=default_args): literal = ["a", "b", "c"] mapped = MockOperator.partial(task_id="task", retries=1).expand(arg2=literal) @@ -150,7 +150,7 @@ def test_map_unknown_arg_raises(): def test_map_xcom_arg(): """Test that dependencies are correct when mapping with an XComArg""" - with DAG("test-dag", start_date=DEFAULT_DATE): + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE): task1 = BaseOperator(task_id="op1") mapped = MockOperator.partial(task_id="task_2").expand(arg2=task1.output) finish = MockOperator(task_id="finish") @@ -803,7 +803,7 @@ def execute(self, context): def 
test_task_mapping_with_task_group_context(): - with DAG("test-dag", start_date=DEFAULT_DATE) as dag: + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE) as dag: task1 = BaseOperator(task_id="op1") finish = MockOperator(task_id="finish") @@ -824,7 +824,7 @@ def test_task_mapping_with_task_group_context(): def test_task_mapping_with_explicit_task_group(): - with DAG("test-dag", start_date=DEFAULT_DATE) as dag: + with DAG("test-dag", schedule=None, start_date=DEFAULT_DATE) as dag: task1 = BaseOperator(task_id="op1") finish = MockOperator(task_id="finish") diff --git a/tests/models/test_serialized_dag.py b/tests/models/test_serialized_dag.py index 83da1916378df..f86aa1b904672 100644 --- a/tests/models/test_serialized_dag.py +++ b/tests/models/test_serialized_dag.py @@ -184,9 +184,9 @@ def test_remove_dags_by_filepath(self): @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode def test_bulk_sync_to_db(self): dags = [ - DAG("dag_1"), - DAG("dag_2"), - DAG("dag_3"), + DAG("dag_1", schedule=None), + DAG("dag_2", schedule=None), + DAG("dag_3", schedule=None), ] with assert_queries_count(10): SDM.bulk_sync_to_db(dags) diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 2d3da6a413e2a..d7add80fcc40a 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -420,7 +420,7 @@ def test_pool_slots_property(self): Test that trying to create a task with pool_slots less than 1 raises an error """ - dag = DAG(dag_id="test_run_pooling_task") + dag = DAG(dag_id="test_run_pooling_task", schedule=None) with pytest.raises(ValueError, match="pool slots .* cannot be less than 1"): EmptyOperator( task_id="test_run_pooling_task_op", diff --git a/tests/operators/test_generic_transfer.py b/tests/operators/test_generic_transfer.py index 7f9fd07da171d..c877d7bed99cd 100644 --- a/tests/operators/test_generic_transfer.py +++ b/tests/operators/test_generic_transfer.py @@ -39,7 +39,7 @@ class TestMySql: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag def teardown_method(self): diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py index 36a51bbb745c4..f1870c4d76db1 100644 --- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py +++ b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py @@ -66,7 +66,7 @@ def setup_tests(self, create_log_template, tmp_path_factory, session): date = datetime(2020, 1, 1) dag_id = "dag_for_testing_cloudwatch_task_handler" task_id = "task_for_testing_cloudwatch_log_handler" - self.dag = DAG(dag_id=dag_id, start_date=date) + self.dag = DAG(dag_id=dag_id, schedule=None, start_date=date) task = EmptyOperator(task_id=task_id, dag=self.dag) dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test", run_type="scheduled") session.add(dag_run) diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/tests/providers/amazon/aws/log/test_s3_task_handler.py index 7bec2871052c5..3412011eb4ef4 100644 --- a/tests/providers/amazon/aws/log/test_s3_task_handler.py +++ b/tests/providers/amazon/aws/log/test_s3_task_handler.py @@ -57,7 +57,7 @@ def setup_tests(self, create_log_template, tmp_path_factory, session): assert self.s3_task_handler.hook is not None date = datetime(2016, 1, 1) - self.dag = DAG("dag_for_testing_s3_task_handler",
start_date=date) + self.dag = DAG("dag_for_testing_s3_task_handler", schedule=None, start_date=date) task = EmptyOperator(task_id="task_for_testing_s3_log_handler", dag=self.dag) dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test", run_type="manual") session.add(dag_run) diff --git a/tests/providers/amazon/aws/operators/test_cloud_formation.py b/tests/providers/amazon/aws/operators/test_cloud_formation.py index 071ba5c847040..5de02c3622cfb 100644 --- a/tests/providers/amazon/aws/operators/test_cloud_formation.py +++ b/tests/providers/amazon/aws/operators/test_cloud_formation.py @@ -78,7 +78,7 @@ def test_create_stack(self, mocked_hook_client): task_id="test_task", stack_name=stack_name, cloudformation_parameters={"TimeoutInMinutes": timeout, "TemplateBody": template_body}, - dag=DAG("test_dag_id", default_args=DEFAULT_ARGS), + dag=DAG("test_dag_id", schedule=None, default_args=DEFAULT_ARGS), ) operator.execute(MagicMock()) @@ -119,7 +119,7 @@ def test_delete_stack(self, mocked_hook_client): operator = CloudFormationDeleteStackOperator( task_id="test_task", stack_name=stack_name, - dag=DAG("test_dag_id", default_args=DEFAULT_ARGS), + dag=DAG("test_dag_id", schedule=None, default_args=DEFAULT_ARGS), ) operator.execute(MagicMock()) diff --git a/tests/providers/amazon/aws/operators/test_emr_add_steps.py b/tests/providers/amazon/aws/operators/test_emr_add_steps.py index 9a17d3a751d56..9ee99864e00e3 100644 --- a/tests/providers/amazon/aws/operators/test_emr_add_steps.py +++ b/tests/providers/amazon/aws/operators/test_emr_add_steps.py @@ -68,7 +68,7 @@ def setup_method(self): job_flow_id="j-8989898989", aws_conn_id="aws_default", steps=self._config, - dag=DAG("test_dag_id", default_args=self.args), + dag=DAG("test_dag_id", schedule=None, default_args=self.args), ) def test_init(self): @@ -132,6 +132,7 @@ def test_render_template(self, session, clean_dags_and_dagruns): def test_render_template_from_file(self, mocked_hook_client, session, clean_dags_and_dagruns): dag = DAG( dag_id="test_file", + schedule=None, default_args=self.args, template_searchpath=TEMPLATE_SEARCHPATH, template_undefined=StrictUndefined, @@ -188,7 +189,7 @@ def test_init_with_cluster_name(self, mocked_hook_client): job_flow_name="test_cluster", cluster_states=["RUNNING", "WAITING"], aws_conn_id="aws_default", - dag=DAG("test_dag_id", default_args=self.args), + dag=DAG("test_dag_id", schedule=None, default_args=self.args), ) with patch( @@ -207,7 +208,7 @@ def test_init_with_nonexistent_cluster_name(self): job_flow_name=cluster_name, cluster_states=["RUNNING", "WAITING"], aws_conn_id="aws_default", - dag=DAG("test_dag_id", default_args=self.args), + dag=DAG("test_dag_id", schedule=None, default_args=self.args), ) with patch( @@ -223,7 +224,7 @@ def test_wait_for_completion(self, mocked_hook_client): task_id="test_task", job_flow_id=job_flow_id, aws_conn_id="aws_default", - dag=DAG("test_dag_id", default_args=self.args), + dag=DAG("test_dag_id", schedule=None, default_args=self.args), wait_for_completion=False, ) @@ -247,7 +248,7 @@ def test_wait_for_completion_false_with_deferrable(self): task_id="test_task", job_flow_id=job_flow_id, aws_conn_id="aws_default", - dag=DAG("test_dag_id", default_args=self.args), + dag=DAG("test_dag_id", schedule=None, default_args=self.args), wait_for_completion=True, deferrable=True, ) @@ -264,7 +265,7 @@ def test_emr_add_steps_deferrable(self, mock_add_job_flow_steps, mock_get_log_ur task_id="test_task", job_flow_id=job_flow_id, aws_conn_id="aws_default", - 
dag=DAG("test_dag_id", default_args=self.args), + dag=DAG("test_dag_id", schedule=None, default_args=self.args), wait_for_completion=True, deferrable=True, ) diff --git a/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py b/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py index 73b7e090b8b2b..204d292c67b46 100644 --- a/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py +++ b/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py @@ -81,6 +81,7 @@ def setup_method(self): region_name="ap-southeast-2", dag=DAG( TEST_DAG_ID, + schedule=None, default_args=args, template_searchpath=TEMPLATE_SEARCHPATH, template_undefined=StrictUndefined, diff --git a/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py b/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py index 98d8ba9989be9..6dada442ff79f 100644 --- a/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py +++ b/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py @@ -47,7 +47,7 @@ def setup_method(self): cluster_id="j-8989898989", step_concurrency_level=1, aws_conn_id="aws_default", - dag=DAG("test_dag_id", default_args=args), + dag=DAG("test_dag_id", schedule=None, default_args=args), ) def test_init(self): diff --git a/tests/providers/amazon/aws/operators/test_rds.py b/tests/providers/amazon/aws/operators/test_rds.py index 651db53d42955..b8eeb09963a96 100644 --- a/tests/providers/amazon/aws/operators/test_rds.py +++ b/tests/providers/amazon/aws/operators/test_rds.py @@ -146,7 +146,11 @@ class TestBaseRdsOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.op = RdsBaseOperator(task_id="test_task", aws_conn_id="aws_default", dag=cls.dag) @classmethod @@ -162,7 +166,11 @@ def test_hook_attribute(self): class TestRdsCreateDbSnapshotOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -261,7 +269,11 @@ def test_create_db_cluster_snapshot_no_wait(self, mock_wait): class TestRdsCopyDbSnapshotOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -367,7 +379,11 @@ def test_copy_db_cluster_snapshot_no_wait(self, mock_await_status): class TestRdsDeleteDbSnapshotOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -470,7 +486,11 @@ def test_delete_db_cluster_snapshot_no_wait(self): class TestRdsStartExportTaskOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": 
DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -536,7 +556,11 @@ def test_start_export_task_no_wait(self, mock_await_status): class TestRdsCancelExportTaskOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -596,7 +620,11 @@ def test_cancel_export_task_no_wait(self, mock_await_status): class TestRdsCreateEventSubscriptionOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -658,7 +686,11 @@ def test_create_event_subscription_no_wait(self, mock_await_status): class TestRdsDeleteEventSubscriptionOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -687,7 +719,11 @@ def test_delete_event_subscription(self): class TestRdsCreateDbInstanceOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -749,7 +785,11 @@ def test_create_db_instance_no_wait(self, mock_await_status): class TestRdsDeleteDbInstanceOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -803,7 +843,11 @@ def test_delete_db_instance_no_wait(self, mock_await_status): class TestRdsStopDbOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") _patch_hook_get_connection(cls.hook) @@ -903,7 +947,11 @@ def test_stop_db_cluster_create_snapshot_logs_warning_message(self, caplog): class TestRdsStartDbOperator: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") 
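Every RDS test class in this file repeats the same setup_class boilerplate once the explicit schedule is added. A minimal sketch of how that repeated construction could be shared, assuming a module-level helper (the helper name and the DEFAULT_DATE stand-in below are illustrative, not part of this patch):

    from datetime import datetime

    from airflow.models.dag import DAG

    DEFAULT_DATE = datetime(2016, 1, 1)  # assumed stand-in for the tests' own constant

    def make_unscheduled_test_dag(dag_id: str = "test_dag") -> DAG:
        # schedule=None keeps these container DAGs out of the scheduler,
        # matching the explicit-schedule pattern applied throughout this diff.
        return DAG(
            dag_id=dag_id,
            schedule=None,
            default_args={"owner": "airflow", "start_date": DEFAULT_DATE},
        )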
_patch_hook_get_connection(cls.hook) diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_base.py b/tests/providers/amazon/aws/operators/test_sagemaker_base.py index 9da377e89f509..25e7ff9c2a443 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_base.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_base.py @@ -169,7 +169,7 @@ def test_create_experiment(self, conn_mock, session, clean_dags_and_dagruns): # putting a DAG around the operator so that jinja template gets rendered execution_date = timezone.datetime(2020, 1, 1) - dag = DAG("test_experiment", start_date=execution_date) + dag = DAG("test_experiment", schedule=None, start_date=execution_date) op = SageMakerCreateExperimentOperator( name="the name", description="the desc", diff --git a/tests/providers/amazon/aws/sensors/test_rds.py b/tests/providers/amazon/aws/sensors/test_rds.py index fa771eff9dd4a..4edad83add0ef 100644 --- a/tests/providers/amazon/aws/sensors/test_rds.py +++ b/tests/providers/amazon/aws/sensors/test_rds.py @@ -105,7 +105,11 @@ class TestBaseRdsSensor: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.base_sensor = RdsBaseSensor(task_id="test_task", aws_conn_id="aws_default", dag=cls.dag) @classmethod @@ -121,7 +125,11 @@ def test_hook_attribute(self): class TestRdsSnapshotExistenceSensor: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook() @classmethod @@ -180,7 +188,11 @@ def test_db_instance_cluster_poke_false(self): class TestRdsExportTaskExistenceSensor: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook(aws_conn_id=AWS_CONN, region_name="us-east-1") @classmethod @@ -215,7 +227,11 @@ def test_export_task_poke_false(self): class TestRdsDbSensor: @classmethod def setup_class(cls): - cls.dag = DAG("test_dag", default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id="test_dag", + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) cls.hook = RdsHook() @classmethod diff --git a/tests/providers/amazon/aws/sensors/test_s3.py b/tests/providers/amazon/aws/sensors/test_s3.py index e51b876239eac..2d9ee9d52eae7 100644 --- a/tests/providers/amazon/aws/sensors/test_s3.py +++ b/tests/providers/amazon/aws/sensors/test_s3.py @@ -117,7 +117,7 @@ def test_parse_bucket_key_from_jinja(self, mock_head_object, session, clean_dags execution_date = timezone.datetime(2020, 1, 1) - dag = DAG("test_s3_key", start_date=execution_date) + dag = DAG("test_s3_key", schedule=None, start_date=execution_date) op = S3KeySensor( task_id="s3_key_sensor", bucket_key="{{ var.value.test_bucket_key }}", @@ -148,7 +148,7 @@ def test_parse_list_of_bucket_keys_from_jinja(self, mock_head_object, session, c execution_date = timezone.datetime(2020, 1, 1) - dag = DAG("test_s3_key", start_date=execution_date, render_template_as_native_obj=True) + dag = DAG("test_s3_key", schedule=None, start_date=execution_date, 
render_template_as_native_obj=True) op = S3KeySensor( task_id="s3_key_sensor", bucket_key="{{ var.value.test_bucket_key }}", diff --git a/tests/providers/amazon/aws/transfers/test_base.py b/tests/providers/amazon/aws/transfers/test_base.py index 8fd8c953c3698..b5144f4a7f64c 100644 --- a/tests/providers/amazon/aws/transfers/test_base.py +++ b/tests/providers/amazon/aws/transfers/test_base.py @@ -32,7 +32,7 @@ class TestAwsToAwsBaseOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @pytest.mark.db_test def test_render_template(self, session, clean_dags_and_dagruns): diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py index 202b4f63c00bf..ad9400c72ade8 100644 --- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py @@ -297,7 +297,7 @@ def test_dynamodb_to_s3_with_just_dest_aws_conn_id(self, mock_aws_dynamodb_hook, @pytest.mark.db_test def test_render_template(self, session): - dag = DAG("test_render_template_dag_id", start_date=datetime(2020, 1, 1)) + dag = DAG("test_render_template_dag_id", schedule=None, start_date=datetime(2020, 1, 1)) operator = DynamoDBToS3Operator( task_id="dynamodb_to_s3_test_render", dag=dag, diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py index fcd01cb7e4abc..0fc8d793660e2 100644 --- a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py +++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py @@ -36,7 +36,7 @@ class TestHiveToDynamoDBOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG("test_dag_id", default_args=args) + dag = DAG("test_dag_id", schedule=None, default_args=args) self.dag = dag self.sql = "SELECT 1" self.hook = DynamoDBHook(aws_conn_id="aws_default", region_name="us-east-1") diff --git a/tests/providers/amazon/aws/transfers/test_http_to_s3.py b/tests/providers/amazon/aws/transfers/test_http_to_s3.py index 89b224932f91a..aa95b9ec8ef39 100644 --- a/tests/providers/amazon/aws/transfers/test_http_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_http_to_s3.py @@ -33,7 +33,7 @@ class TestHttpToS3Operator: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.http_conn_id = "HTTP_EXAMPLE" self.response = b"Example.com fake response" self.endpoint = "/" diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/tests/providers/amazon/aws/transfers/test_local_to_s3.py index fa1d294239b29..7da90d39c86a1 100644 --- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py @@ -33,7 +33,7 @@ class TestFileToS3Operator: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.dest_key = "test/test1.csv" self.dest_bucket = "dummy" self.testfile1 = "/tmp/fake1.csv" diff --git a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py index 
f86ef1389c8bf..63afce78c4ff9 100644 --- a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py @@ -46,7 +46,7 @@ class TestMongoToS3Operator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.mock_operator = MongoToS3Operator( task_id=TASK_ID, diff --git a/tests/providers/apache/druid/transfers/test_hive_to_druid.py b/tests/providers/apache/druid/transfers/test_hive_to_druid.py index 5900707c7743a..545ae8e94dd82 100644 --- a/tests/providers/apache/druid/transfers/test_hive_to_druid.py +++ b/tests/providers/apache/druid/transfers/test_hive_to_druid.py @@ -57,7 +57,7 @@ def setup_method(self): import requests_mock args = {"owner": "airflow", "start_date": "2017-01-01"} - self.dag = DAG("hive_to_druid", default_args=args) + self.dag = DAG("hive_to_druid", schedule=None, default_args=args) session = requests.Session() adapter = requests_mock.Adapter() diff --git a/tests/providers/apache/flink/operators/test_flink_kubernetes.py b/tests/providers/apache/flink/operators/test_flink_kubernetes.py index bbf85d7e8bdb7..96e03fb0acd58 100644 --- a/tests/providers/apache/flink/operators/test_flink_kubernetes.py +++ b/tests/providers/apache/flink/operators/test_flink_kubernetes.py @@ -200,7 +200,7 @@ def setup_method(self): ) ) args = {"owner": "airflow", "start_date": timezone.datetime(2020, 2, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @patch("kubernetes.client.api.custom_objects_api.CustomObjectsApi.create_namespaced_custom_object") def test_create_application_from_yaml(self, mock_create_namespaced_crd, mock_kubernetes_hook): diff --git a/tests/providers/apache/flink/sensors/test_flink_kubernetes.py b/tests/providers/apache/flink/sensors/test_flink_kubernetes.py index 2e5b635fdcdc6..59b794702ccd9 100644 --- a/tests/providers/apache/flink/sensors/test_flink_kubernetes.py +++ b/tests/providers/apache/flink/sensors/test_flink_kubernetes.py @@ -883,7 +883,7 @@ def setup_method(self): ) ) args = {"owner": "airflow", "start_date": timezone.datetime(2020, 2, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @patch( "kubernetes.client.api.custom_objects_api.CustomObjectsApi.get_namespaced_custom_object", diff --git a/tests/providers/apache/hive/__init__.py b/tests/providers/apache/hive/__init__.py index 3db74244b8395..f1931125da5d8 100644 --- a/tests/providers/apache/hive/__init__.py +++ b/tests/providers/apache/hive/__init__.py @@ -32,7 +32,7 @@ class TestHiveEnvironment: def setup_method(self, method): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG("test_dag_id", default_args=args) + dag = DAG("test_dag_id", schedule=None, default_args=args) self.dag = dag self.hql = """ USE airflow; diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py index f81d31a04c316..93494876175ad 100644 --- a/tests/providers/apache/hive/hooks/test_hive.py +++ b/tests/providers/apache/hive/hooks/test_hive.py @@ -591,7 +591,7 @@ def _upload_dataframe(self): def setup_method(self): self._upload_dataframe() args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.database = 
"airflow" self.table = "hive_server_hook" diff --git a/tests/providers/apache/hive/sensors/test_named_hive_partition.py b/tests/providers/apache/hive/sensors/test_named_hive_partition.py index 4f867b45fe9b8..01827692273a6 100644 --- a/tests/providers/apache/hive/sensors/test_named_hive_partition.py +++ b/tests/providers/apache/hive/sensors/test_named_hive_partition.py @@ -39,7 +39,7 @@ class TestNamedHivePartitionSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.next_day = (DEFAULT_DATE + timedelta(days=1)).isoformat()[:10] self.database = "airflow" self.partition_by = "ds" diff --git a/tests/providers/apache/hive/transfers/test_vertica_to_hive.py b/tests/providers/apache/hive/transfers/test_vertica_to_hive.py index 4fbe5db0d9bdb..9a368db2f7a56 100644 --- a/tests/providers/apache/hive/transfers/test_vertica_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_vertica_to_hive.py @@ -46,7 +46,7 @@ def mock_get_conn(): class TestVerticaToHiveTransfer: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @mock.patch( "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaHook.get_conn", diff --git a/tests/providers/apache/kylin/operators/test_kylin_cube.py b/tests/providers/apache/kylin/operators/test_kylin_cube.py index 1e8df75e61808..572e27b9037ec 100644 --- a/tests/providers/apache/kylin/operators/test_kylin_cube.py +++ b/tests/providers/apache/kylin/operators/test_kylin_cube.py @@ -61,7 +61,7 @@ class TestKylinCubeOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @patch("airflow.providers.apache.kylin.operators.kylin_cube.KylinHook") def test_execute(self, mock_hook): diff --git a/tests/providers/apache/livy/operators/test_livy.py b/tests/providers/apache/livy/operators/test_livy.py index ffb710587f70d..be1674e189ff9 100644 --- a/tests/providers/apache/livy/operators/test_livy.py +++ b/tests/providers/apache/livy/operators/test_livy.py @@ -41,7 +41,7 @@ class TestLivyOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) db.merge_conn( Connection( conn_id="livyunittest", conn_type="livy", host="localhost:8998", port="8998", schema="http" diff --git a/tests/providers/apache/livy/sensors/test_livy.py b/tests/providers/apache/livy/sensors/test_livy.py index 099de060a40c9..af4ce5fd71bde 100644 --- a/tests/providers/apache/livy/sensors/test_livy.py +++ b/tests/providers/apache/livy/sensors/test_livy.py @@ -35,7 +35,7 @@ class TestLivySensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) db.merge_conn(Connection(conn_id="livyunittest", conn_type="livy", host="http://localhost:8998")) @pytest.mark.parametrize( diff --git a/tests/providers/apache/spark/operators/test_spark_jdbc.py b/tests/providers/apache/spark/operators/test_spark_jdbc.py index 955337c9fa372..980e359946558 100644 --- 
a/tests/providers/apache/spark/operators/test_spark_jdbc.py +++ b/tests/providers/apache/spark/operators/test_spark_jdbc.py @@ -60,7 +60,7 @@ class TestSparkJDBCOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_execute(self): # Given / When diff --git a/tests/providers/apache/spark/operators/test_spark_sql.py b/tests/providers/apache/spark/operators/test_spark_sql.py index 66bc810ffb401..070826bdd427b 100644 --- a/tests/providers/apache/spark/operators/test_spark_sql.py +++ b/tests/providers/apache/spark/operators/test_spark_sql.py @@ -45,7 +45,7 @@ class TestSparkSqlOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_execute(self): # Given / When diff --git a/tests/providers/apache/spark/operators/test_spark_submit.py b/tests/providers/apache/spark/operators/test_spark_submit.py index e93a45b21e9c7..87fb50ff91f42 100644 --- a/tests/providers/apache/spark/operators/test_spark_submit.py +++ b/tests/providers/apache/spark/operators/test_spark_submit.py @@ -75,7 +75,7 @@ class TestSparkSubmitOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_execute(self): # Given / When diff --git a/tests/providers/arangodb/sensors/test_arangodb.py b/tests/providers/arangodb/sensors/test_arangodb.py index 2d7fd102d2500..a114711b98545 100644 --- a/tests/providers/arangodb/sensors/test_arangodb.py +++ b/tests/providers/arangodb/sensors/test_arangodb.py @@ -37,7 +37,7 @@ class TestAQLSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG("test_dag_id", default_args=args) + dag = DAG("test_dag_id", schedule=None, default_args=args) self.dag = dag db.merge_conn( Connection( diff --git a/tests/providers/asana/operators/test_asana_tasks.py b/tests/providers/asana/operators/test_asana_tasks.py index faef538bbefdb..4b98b25ecc71c 100644 --- a/tests/providers/asana/operators/test_asana_tasks.py +++ b/tests/providers/asana/operators/test_asana_tasks.py @@ -16,6 +16,7 @@ # under the License. 
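Unlike most hunks in this patch, the test_asana_tasks.py change just below opts for a real timedelta schedule instead of None, importing timedelta for the purpose. A minimal sketch of the behavioral difference between the two explicit values used across this diff, assuming ordinary Airflow 2.x scheduling:

    from datetime import datetime, timedelta

    from airflow.models.dag import DAG

    start = datetime(2021, 1, 1)

    # schedule=None: no timetable; the DAG only runs when triggered
    # manually, via the API, or by a test harness.
    manual_only = DAG("manual_only", schedule=None, start_date=start)

    # schedule=timedelta(days=1): the scheduler creates one run per day.
    daily = DAG("daily", schedule=timedelta(days=1), start_date=start)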
from __future__ import annotations +from datetime import timedelta from unittest.mock import Mock, patch import pytest @@ -46,7 +47,7 @@ class TestAsanaTaskOperators: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=timedelta(days=1), default_args=args) self.dag = dag db.merge_conn(Connection(conn_id="asana_test", conn_type="asana", password="test")) diff --git a/tests/providers/celery/executors/test_celery_executor.py b/tests/providers/celery/executors/test_celery_executor.py index e0913054d1dae..2f12cec2b4c6a 100644 --- a/tests/providers/celery/executors/test_celery_executor.py +++ b/tests/providers/celery/executors/test_celery_executor.py @@ -185,7 +185,7 @@ def test_command_validation(self, command, raise_exception): def test_try_adopt_task_instances_none(self): start_date = timezone.utcnow() - timedelta(days=2) - with DAG("test_try_adopt_task_instances_none"): + with DAG("test_try_adopt_task_instances_none", schedule=None): task_1 = BaseOperator(task_id="task_1", start_date=start_date) key1 = TaskInstance(task=task_1, run_id=None) @@ -200,7 +200,7 @@ def test_try_adopt_task_instances_none(self): def test_try_adopt_task_instances(self): start_date = timezone.utcnow() - timedelta(days=2) - with DAG("test_try_adopt_task_instances_none") as dag: + with DAG("test_try_adopt_task_instances_none", schedule=None) as dag: task_1 = BaseOperator(task_id="task_1", start_date=start_date) task_2 = BaseOperator(task_id="task_2", start_date=start_date) @@ -237,7 +237,7 @@ def mock_celery_revoke(self): def test_cleanup_stuck_queued_tasks(self, mock_fail): start_date = timezone.utcnow() - timedelta(days=2) - with DAG("test_cleanup_stuck_queued_tasks_failed"): + with DAG("test_cleanup_stuck_queued_tasks_failed", schedule=None): task = BaseOperator(task_id="task_1", start_date=start_date) ti = TaskInstance(task=task, run_id=None) diff --git a/tests/providers/cncf/kubernetes/operators/test_job.py b/tests/providers/cncf/kubernetes/operators/test_job.py index 307a4ff8425b3..c920d74a4ab41 100644 --- a/tests/providers/cncf/kubernetes/operators/test_job.py +++ b/tests/providers/cncf/kubernetes/operators/test_job.py @@ -51,7 +51,7 @@ def create_context(task, persist_to_db=False, map_index=None): if task.has_dag(): dag = task.dag else: - dag = DAG(dag_id="dag", start_date=pendulum.now()) + dag = DAG(dag_id="dag", schedule=None, start_date=pendulum.now()) dag.add_task(task) dag_run = DagRun( run_id=DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE), @@ -461,7 +461,7 @@ def test_task_id_as_name_with_suffix_very_long(self): ) def test_task_id_as_name_dag_id_is_ignored(self): - dag = DAG(dag_id="this_is_a_dag_name", start_date=pendulum.now()) + dag = DAG(dag_id="this_is_a_dag_name", schedule=None, start_date=pendulum.now()) k = KubernetesJobOperator( task_id="a_very_reasonable_task_name", dag=dag, diff --git a/tests/providers/cncf/kubernetes/operators/test_pod.py b/tests/providers/cncf/kubernetes/operators/test_pod.py index fd01a3e4952a7..1ca0d9851b18c 100644 --- a/tests/providers/cncf/kubernetes/operators/test_pod.py +++ b/tests/providers/cncf/kubernetes/operators/test_pod.py @@ -90,7 +90,7 @@ def create_context(task, persist_to_db=False, map_index=None): if task.has_dag(): dag = task.dag else: - dag = DAG(dag_id="dag", start_date=pendulum.now()) + dag = DAG(dag_id="dag", schedule=None, start_date=pendulum.now()) dag.add_task(task) dag_run = DagRun( run_id=DagRun.generate_run_id(DagRunType.MANUAL, 
DEFAULT_DATE), @@ -253,6 +253,7 @@ def test_config_path(self, hook_mock): def test_env_vars(self, input, render_template_as_native_obj, raises_error): dag = DAG( dag_id="dag", + schedule=None, start_date=pendulum.now(), render_template_as_native_obj=render_template_as_native_obj, ) @@ -1347,7 +1348,7 @@ def test_mark_checked_if_not_deleted( self, mock_patch_already_checked, mock_delete_pod, task_kwargs, should_fail, should_be_deleted ): """If we aren't deleting pods mark "checked" if the task completes (successful or otherwise)""" - dag = DAG("hello2", start_date=pendulum.now()) + dag = DAG("hello2", schedule=None, start_date=pendulum.now()) k = KubernetesPodOperator( task_id="task", dag=dag, @@ -1416,7 +1417,7 @@ def test_task_id_as_name_with_suffix_very_long(self): ) def test_task_id_as_name_dag_id_is_ignored(self): - dag = DAG(dag_id="this_is_a_dag_name", start_date=pendulum.now()) + dag = DAG(dag_id="this_is_a_dag_name", schedule=None, start_date=pendulum.now()) k = KubernetesPodOperator( task_id="a_very_reasonable_task_name", dag=dag, diff --git a/tests/providers/cncf/kubernetes/operators/test_resource.py b/tests/providers/cncf/kubernetes/operators/test_resource.py index ec7a8c8f7a0c5..9f4f004ce50d2 100644 --- a/tests/providers/cncf/kubernetes/operators/test_resource.py +++ b/tests/providers/cncf/kubernetes/operators/test_resource.py @@ -91,7 +91,7 @@ def setup_tests(self, dag_maker): def setup_method(self): args = {"owner": "airflow", "start_date": timezone.datetime(2020, 2, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @patch("kubernetes.config.load_kube_config") @patch("kubernetes.client.api.CoreV1Api.create_namespaced_persistent_volume_claim") diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py index 2ae5e5f1a940c..343190d093e5a 100644 --- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py +++ b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py @@ -166,7 +166,7 @@ def test_spark_kubernetes_operator_hook(mock_kubernetes_hook, data_file): def create_context(task): - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) tzinfo = pendulum.timezone("Europe/Amsterdam") execution_date = timezone.datetime(2016, 1, 1, 1, 0, 0, tzinfo=tzinfo) dag_run = DagRun( @@ -197,7 +197,7 @@ def create_context(task): @patch("kubernetes.client.api.custom_objects_api.CustomObjectsApi.get_namespaced_custom_object_status") @patch("kubernetes.client.api.custom_objects_api.CustomObjectsApi.create_namespaced_custom_object") class TestSparkKubernetesOperator: - def setUp(self): + def setup_method(self): db.merge_conn( Connection(conn_id="kubernetes_default_kube_config", conn_type="kubernetes", extra=json.dumps({})) ) @@ -209,7 +209,7 @@ def setUp(self): ) ) args = {"owner": "airflow", "start_date": timezone.datetime(2020, 2, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def execute_operator(self, task_name, mock_create_job_name, job_spec): mock_create_job_name.return_value = task_name diff --git a/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py b/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py index 3f8e626df92da..ac0f56a309e47 100644 --- a/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py +++ b/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py @@ -565,7 
+565,7 @@ def setup_method(self): ) ) args = {"owner": "airflow", "start_date": timezone.datetime(2020, 2, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_init(self, mock_kubernetes_hook): sensor = SparkKubernetesSensor(task_id="task", application_name="application") diff --git a/tests/providers/common/sql/operators/test_sql.py b/tests/providers/common/sql/operators/test_sql.py index 43b355d9da4cc..dbc99e1c30e96 100644 --- a/tests/providers/common/sql/operators/test_sql.py +++ b/tests/providers/common/sql/operators/test_sql.py @@ -63,7 +63,12 @@ def _get_mock_db_hook(): class TestBaseSQLOperator: def _construct_operator(self, **kwargs): - dag = DAG("test_dag", start_date=datetime.datetime(2017, 1, 1), render_template_as_native_obj=True) + dag = DAG( + "test_dag", + schedule=None, + start_date=datetime.datetime(2017, 1, 1), + render_template_as_native_obj=True, + ) return BaseSQLOperator( task_id="test_task", conn_id="{{ conn_id }}", @@ -85,7 +90,7 @@ def test_templated_fields(self): class TestSQLExecuteQueryOperator: def _construct_operator(self, sql, **kwargs): - dag = DAG("test_dag", start_date=datetime.datetime(2017, 1, 1)) + dag = DAG("test_dag", schedule=None, start_date=datetime.datetime(2017, 1, 1)) return SQLExecuteQueryOperator( task_id="test_task", conn_id="default_conn", @@ -708,7 +713,7 @@ def setup_method(self): self.conn_id = "default_conn" def _construct_operator(self, sql, pass_value, tolerance=None): - dag = DAG("test_dag", start_date=datetime.datetime(2017, 1, 1)) + dag = DAG("test_dag", schedule=None, start_date=datetime.datetime(2017, 1, 1)) return SQLValueCheckOperator( dag=dag, @@ -882,7 +887,7 @@ def returned_row(): class TestThresholdCheckOperator: def _construct_operator(self, sql, min_threshold, max_threshold): - dag = DAG("test_dag", start_date=datetime.datetime(2017, 1, 1)) + dag = DAG("test_dag", schedule=None, start_date=datetime.datetime(2017, 1, 1)) return SQLThresholdCheckOperator( task_id="test_task", diff --git a/tests/providers/common/sql/sensors/test_sql.py b/tests/providers/common/sql/sensors/test_sql.py index 632557ff4f3aa..ee07c1b0ec0b8 100644 --- a/tests/providers/common/sql/sensors/test_sql.py +++ b/tests/providers/common/sql/sensors/test_sql.py @@ -40,7 +40,7 @@ class TestSqlSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG(TEST_DAG_ID, default_args=args) + self.dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) @pytest.mark.db_test def test_unsupported_conn_type(self): diff --git a/tests/providers/databricks/operators/test_databricks.py b/tests/providers/databricks/operators/test_databricks.py index 7ff2295eda94a..c9e540903f18e 100644 --- a/tests/providers/databricks/operators/test_databricks.py +++ b/tests/providers/databricks/operators/test_databricks.py @@ -389,7 +389,7 @@ def test_init_with_merging(self): def test_init_with_templating(self): json = {"name": "test-{{ ds }}"} - dag = DAG("test", start_date=datetime.now()) + dag = DAG("test", schedule=None, start_date=datetime.now()) op = DatabricksCreateJobsOperator(dag=dag, task_id=TASK_ID, json=json) op.render_template_fields(context={"ds": DATE}) expected = utils.normalise_json_content({"name": f"test-{DATE}"}) @@ -765,7 +765,7 @@ def test_init_with_templating(self): "new_cluster": NEW_CLUSTER, "notebook_task": TEMPLATED_NOTEBOOK_TASK, } - dag = DAG("test", start_date=datetime.now()) + dag = DAG("test", schedule=None, start_date=datetime.now()) op = 
DatabricksSubmitRunOperator(dag=dag, task_id=TASK_ID, json=json) op.render_template_fields(context={"ds": DATE}) expected = utils.normalise_json_content( @@ -1197,7 +1197,7 @@ def test_init_with_merging(self): def test_init_with_templating(self): json = {"notebook_params": NOTEBOOK_PARAMS, "jar_params": TEMPLATED_JAR_PARAMS} - dag = DAG("test", start_date=datetime.now()) + dag = DAG("test", schedule=None, start_date=datetime.now()) op = DatabricksRunNowOperator(dag=dag, task_id=TASK_ID, job_id=JOB_ID, json=json) op.render_template_fields(context={"ds": DATE}) expected = utils.normalise_json_content( @@ -2039,7 +2039,7 @@ def test_extend_workflow_notebook_packages(self): def test_convert_to_databricks_workflow_task(self): """Test that the operator can convert itself to a Databricks workflow task.""" - dag = DAG(dag_id="example_dag", start_date=datetime.now()) + dag = DAG(dag_id="example_dag", schedule=None, start_date=datetime.now()) operator = DatabricksNotebookOperator( notebook_path="/path/to/notebook", source="WORKSPACE", diff --git a/tests/providers/databricks/operators/test_databricks_workflow.py b/tests/providers/databricks/operators/test_databricks_workflow.py index 99f1a9d14815d..4c3f54b800ae9 100644 --- a/tests/providers/databricks/operators/test_databricks_workflow.py +++ b/tests/providers/databricks/operators/test_databricks_workflow.py @@ -177,7 +177,7 @@ def mock_databricks_workflow_operator(): def test_task_group_initialization(): """Test that DatabricksWorkflowTaskGroup initializes correctly.""" - with DAG(dag_id="example_databricks_workflow_dag", start_date=DEFAULT_DATE) as example_dag: + with DAG(dag_id="example_databricks_workflow_dag", schedule=None, start_date=DEFAULT_DATE) as example_dag: with DatabricksWorkflowTaskGroup( group_id="test_databricks_workflow", databricks_conn_id="databricks_conn" ) as task_group: @@ -190,7 +190,7 @@ def test_task_group_initialization(): def test_task_group_exit_creates_operator(mock_databricks_workflow_operator): """Test that DatabricksWorkflowTaskGroup creates a _CreateDatabricksWorkflowOperator on exit.""" - with DAG(dag_id="example_databricks_workflow_dag", start_date=DEFAULT_DATE) as example_dag: + with DAG(dag_id="example_databricks_workflow_dag", schedule=None, start_date=DEFAULT_DATE) as example_dag: with DatabricksWorkflowTaskGroup( group_id="test_databricks_workflow", databricks_conn_id="databricks_conn", @@ -220,7 +220,7 @@ def test_task_group_exit_creates_operator(mock_databricks_workflow_operator): def test_task_group_root_tasks_set_upstream_to_operator(mock_databricks_workflow_operator): """Test that tasks added to a DatabricksWorkflowTaskGroup are set upstream to the operator.""" - with DAG(dag_id="example_databricks_workflow_dag", start_date=DEFAULT_DATE): + with DAG(dag_id="example_databricks_workflow_dag", schedule=None, start_date=DEFAULT_DATE): with DatabricksWorkflowTaskGroup( group_id="test_databricks_workflow1", databricks_conn_id="databricks_conn", diff --git a/tests/providers/databricks/sensors/test_databricks_partition.py b/tests/providers/databricks/sensors/test_databricks_partition.py index 09848ca98f380..c9fed9efd67a5 100644 --- a/tests/providers/databricks/sensors/test_databricks_partition.py +++ b/tests/providers/databricks/sensors/test_databricks_partition.py @@ -61,7 +61,7 @@ class TestDatabricksPartitionSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) 
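The Databricks operator hunks above share one templating test pattern: build a throwaway DAG with an explicit schedule, attach the operator, then render its template fields against a hand-built context. A minimal sketch of that pattern with a stock BashOperator, assuming the standard ds macro:

    from datetime import datetime

    from airflow.models.dag import DAG
    from airflow.operators.bash import BashOperator

    dag = DAG("render_demo", schedule=None, start_date=datetime(2021, 1, 1))
    op = BashOperator(task_id="t", dag=dag, bash_command="echo {{ ds }}")

    # render_template_fields mutates the operator in place, resolving Jinja
    # expressions in its templated fields against the supplied context.
    op.render_template_fields(context={"ds": "2021-01-01"})
    assert op.bash_command == "echo 2021-01-01"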
self.partition_sensor = DatabricksPartitionSensor( task_id=TASK_ID, diff --git a/tests/providers/databricks/sensors/test_databricks_sql.py b/tests/providers/databricks/sensors/test_databricks_sql.py index d6e9cc6d3fca1..7a3961f79face 100644 --- a/tests/providers/databricks/sensors/test_databricks_sql.py +++ b/tests/providers/databricks/sensors/test_databricks_sql.py @@ -49,7 +49,7 @@ class TestDatabricksSqlSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.sensor = DatabricksSqlSensor( task_id=TASK_ID, diff --git a/tests/providers/dbt/cloud/operators/test_dbt.py b/tests/providers/dbt/cloud/operators/test_dbt.py index 136a45b9eda8d..658fe84a49d67 100644 --- a/tests/providers/dbt/cloud/operators/test_dbt.py +++ b/tests/providers/dbt/cloud/operators/test_dbt.py @@ -94,7 +94,7 @@ def setup_module(): class TestDbtCloudRunJobOperator: def setup_method(self): - self.dag = DAG("test_dbt_cloud_job_run_op", start_date=DEFAULT_DATE) + self.dag = DAG("test_dbt_cloud_job_run_op", schedule=None, start_date=DEFAULT_DATE) self.mock_ti = MagicMock() self.mock_context = {"ti": self.mock_ti} self.config = { @@ -492,7 +492,7 @@ def test_run_job_operator_link(self, conn_id, account_id, create_task_instance_o class TestDbtCloudGetJobRunArtifactOperator: def setup_method(self): - self.dag = DAG("test_dbt_cloud_get_artifact_op", start_date=DEFAULT_DATE) + self.dag = DAG("test_dbt_cloud_get_artifact_op", schedule=None, start_date=DEFAULT_DATE) @patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_run_artifact") @pytest.mark.parametrize( @@ -667,7 +667,7 @@ def test_get_artifact_with_specified_output_file(self, mock_get_artifact, conn_i class TestDbtCloudListJobsOperator: def setup_method(self): - self.dag = DAG("test_dbt_cloud_list_jobs_op", start_date=DEFAULT_DATE) + self.dag = DAG("test_dbt_cloud_list_jobs_op", schedule=None, start_date=DEFAULT_DATE) self.mock_ti = MagicMock() self.mock_context = {"ti": self.mock_ti} diff --git a/tests/providers/dingding/operators/test_dingding.py b/tests/providers/dingding/operators/test_dingding.py index d2b25c242e2ce..c138ff56a3bfb 100644 --- a/tests/providers/dingding/operators/test_dingding.py +++ b/tests/providers/dingding/operators/test_dingding.py @@ -37,7 +37,7 @@ class TestDingdingOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @mock.patch("airflow.providers.dingding.operators.dingding.DingdingHook") def test_execute(self, mock_hook): diff --git a/tests/providers/discord/operators/test_discord_webhook.py b/tests/providers/discord/operators/test_discord_webhook.py index 27cbe7d6d6532..baaf33cde3883 100644 --- a/tests/providers/discord/operators/test_discord_webhook.py +++ b/tests/providers/discord/operators/test_discord_webhook.py @@ -37,7 +37,7 @@ class TestDiscordWebhookOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_execute(self): operator = DiscordWebhookOperator(task_id="discord_webhook_task", dag=self.dag, **self._config) diff --git a/tests/providers/docker/decorators/test_docker.py b/tests/providers/docker/decorators/test_docker.py index 93db9f211b4db..6c80b03ce8054 100644 
--- a/tests/providers/docker/decorators/test_docker.py +++ b/tests/providers/docker/decorators/test_docker.py @@ -117,7 +117,7 @@ def test_call_decorated_multiple_times(self): def do_run(): return 4 - with DAG("test", start_date=DEFAULT_DATE) as dag: + with DAG("test", schedule=None, start_date=DEFAULT_DATE) as dag: do_run() for _ in range(20): do_run() diff --git a/tests/providers/fab/auth_manager/test_security.py b/tests/providers/fab/auth_manager/test_security.py index 6dd48cc6e6e29..8966b5b5f6c67 100644 --- a/tests/providers/fab/auth_manager/test_security.py +++ b/tests/providers/fab/auth_manager/test_security.py @@ -226,8 +226,8 @@ def mock_dag_models(request, session, security_manager): @pytest.fixture def sample_dags(security_manager): dags = [ - DAG("has_access_control", access_control={"Public": {permissions.ACTION_CAN_READ}}), - DAG("no_access_control"), + DAG("has_access_control", schedule=None, access_control={"Public": {permissions.ACTION_CAN_READ}}), + DAG("no_access_control", schedule=None), ] yield dags diff --git a/tests/providers/ftp/operators/test_ftp.py b/tests/providers/ftp/operators/test_ftp.py index 24eaa2bf4ca63..e246e8dcacd79 100644 --- a/tests/providers/ftp/operators/test_ftp.py +++ b/tests/providers/ftp/operators/test_ftp.py @@ -139,7 +139,11 @@ def test_multiple_paths_put(self, mock_put): @mock.patch("airflow.providers.ftp.operators.ftp.FTPHook.store_file") def test_arg_checking(self, mock_put): - dag = DAG(dag_id="unit_tests_ftp_op_arg_checking", default_args={"start_date": DEFAULT_DATE}) + dag = DAG( + dag_id="unit_tests_ftp_op_arg_checking", + schedule=None, + default_args={"start_date": DEFAULT_DATE}, + ) # If ftp_conn_id is not passed in, it should be assigned the default connection id task_0 = FTPFileTransmitOperator( task_id="test_ftp_args_0", @@ -297,7 +301,7 @@ def test_extract_get(self, get_conn): task = FTPFileTransmitOperator( task_id=task_id, ftp_conn_id="ftp_conn_id", - dag=DAG(dag_id), + dag=DAG(dag_id, schedule=None), start_date=timezone.utcnow(), local_filepath="/path/to/local", remote_filepath="/path/to/remote", @@ -327,7 +331,7 @@ def test_extract_put(self, get_conn): task = FTPFileTransmitOperator( task_id=task_id, ftp_conn_id="ftp_conn_id", - dag=DAG(dag_id), + dag=DAG(dag_id, schedule=None), start_date=timezone.utcnow(), local_filepath="/path/to/local", remote_filepath="/path/to/remote", diff --git a/tests/providers/github/operators/test_github.py b/tests/providers/github/operators/test_github.py index 23f4c1f9d6d59..7766eda69bc2a 100644 --- a/tests/providers/github/operators/test_github.py +++ b/tests/providers/github/operators/test_github.py @@ -36,7 +36,7 @@ class TestGithubOperator: def setup_class(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG("test_dag_id", default_args=args) + dag = DAG("test_dag_id", schedule=None, default_args=args) self.dag = dag db.merge_conn( Connection( diff --git a/tests/providers/github/sensors/test_github.py b/tests/providers/github/sensors/test_github.py index 3b70daeab8f3b..e80edcceedfe7 100644 --- a/tests/providers/github/sensors/test_github.py +++ b/tests/providers/github/sensors/test_github.py @@ -36,7 +36,7 @@ class TestGithubSensor: def setup_class(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG("test_dag_id", default_args=args) + dag = DAG("test_dag_id", schedule=None, default_args=args) self.dag = dag db.merge_conn( Connection( diff --git a/tests/providers/google/cloud/operators/test_cloud_build.py 
b/tests/providers/google/cloud/operators/test_cloud_build.py index 8fbc5af4661c4..3bcc8ac66aa15 100644 --- a/tests/providers/google/cloud/operators/test_cloud_build.py +++ b/tests/providers/google/cloud/operators/test_cloud_build.py @@ -517,7 +517,7 @@ def test_async_load_templated_should_execute_successfully(file_type, file_conten def create_context(task): - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) logical_date = datetime(2022, 1, 1, 0, 0, 0) dag_run = DagRun( dag_id=dag.dag_id, diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/tests/providers/google/cloud/operators/test_dataproc.py index c3d945c80821a..bcfe4eb818aa8 100644 --- a/tests/providers/google/cloud/operators/test_dataproc.py +++ b/tests/providers/google/cloud/operators/test_dataproc.py @@ -413,7 +413,11 @@ class DataprocTestBase: @classmethod def setup_class(cls): cls.dagbag = DagBag(dag_folder="/dev/null", include_examples=False) - cls.dag = DAG(TEST_DAG_ID, default_args={"owner": "airflow", "start_date": DEFAULT_DATE}) + cls.dag = DAG( + dag_id=TEST_DAG_ID, + schedule=None, + default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, + ) def setup_method(self): self.mock_ti = MagicMock() diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/tests/providers/google/cloud/operators/test_mlengine.py index f827d4b340091..b36aa728df02c 100644 --- a/tests/providers/google/cloud/operators/test_mlengine.py +++ b/tests/providers/google/cloud/operators/test_mlengine.py @@ -1495,7 +1495,7 @@ def test_async_create_training_job_should_throw_exception(): def create_context(task): - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) logical_date = datetime(2022, 1, 1, 0, 0, 0) dag_run = DagRun( dag_id=dag.dag_id, diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py index 24ad708db6971..05ef254cb43de 100644 --- a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py @@ -1928,7 +1928,7 @@ def test_execute_complete_reassigns_job_id(self, bq_hook): assert operator.job_id == generated_job_id def create_context(self, task): - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) logical_date = datetime(2022, 1, 1, 0, 0, 0) dag_run = DagRun( dag_id=dag.dag_id, diff --git a/tests/providers/google/cloud/transfers/test_local_to_gcs.py b/tests/providers/google/cloud/transfers/test_local_to_gcs.py index d994f43a2d1b4..bfa331372f64a 100644 --- a/tests/providers/google/cloud/transfers/test_local_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_local_to_gcs.py @@ -40,7 +40,7 @@ class TestFileToGcsOperator: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.testfile1 = "/tmp/fake1.csv" with open(self.testfile1, "wb") as f: f.write(b"x" * 393216) diff --git a/tests/providers/http/sensors/test_http.py b/tests/providers/http/sensors/test_http.py index 4e95c844058fa..2b499a1d686c2 100644 --- a/tests/providers/http/sensors/test_http.py +++ b/tests/providers/http/sensors/test_http.py @@ -289,7 +289,7 @@ def mount(self, prefix, adapter): class TestHttpOpSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE_ISO} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, 
default_args=args) self.dag = dag @mock.patch("requests.Session", FakeSession) diff --git a/tests/providers/microsoft/azure/operators/test_data_factory.py b/tests/providers/microsoft/azure/operators/test_data_factory.py index 3ce0428ee5627..ee89941166d67 100644 --- a/tests/providers/microsoft/azure/operators/test_data_factory.py +++ b/tests/providers/microsoft/azure/operators/test_data_factory.py @@ -296,7 +296,7 @@ def get_conn( def create_context(self, task, dag=None): if dag is None: - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) tzinfo = pendulum.timezone("UTC") execution_date = timezone.datetime(2022, 1, 1, 1, 0, 0, tzinfo=tzinfo) dag_run = DagRun( diff --git a/tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py b/tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py index 02ee5d0d08394..e581db815dcef 100644 --- a/tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py +++ b/tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py @@ -32,7 +32,7 @@ class TestWasbDeleteBlobOperator: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_init(self): operator = WasbDeleteBlobOperator(task_id="wasb_operator_1", dag=self.dag, **self._config) diff --git a/tests/providers/microsoft/azure/sensors/test_wasb.py b/tests/providers/microsoft/azure/sensors/test_wasb.py index 96b24f8cc82cf..63ffa45165c47 100644 --- a/tests/providers/microsoft/azure/sensors/test_wasb.py +++ b/tests/providers/microsoft/azure/sensors/test_wasb.py @@ -52,7 +52,7 @@ class TestWasbBlobSensor: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_init(self): sensor = WasbBlobSensor(task_id="wasb_sensor_1", dag=self.dag, **self._config) @@ -95,7 +95,7 @@ def get_conn(self) -> Connection: def create_context(self, task, dag=None): if dag is None: - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) tzinfo = pendulum.timezone("UTC") execution_date = timezone.datetime(2022, 1, 1, 1, 0, 0, tzinfo=tzinfo) dag_run = DagRun( @@ -181,7 +181,7 @@ class TestWasbPrefixSensor: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_init(self): sensor = WasbPrefixSensor(task_id="wasb_sensor_1", dag=self.dag, **self._config) @@ -224,7 +224,7 @@ def get_conn(self) -> Connection: def create_context(self, task, dag=None): if dag is None: - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) tzinfo = pendulum.timezone("UTC") execution_date = timezone.datetime(2022, 1, 1, 1, 0, 0, tzinfo=tzinfo) dag_run = DagRun( diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_wasb.py b/tests/providers/microsoft/azure/transfers/test_local_to_wasb.py index 9d3ebcb1441bd..9ec4e28090037 100644 --- a/tests/providers/microsoft/azure/transfers/test_local_to_wasb.py +++ b/tests/providers/microsoft/azure/transfers/test_local_to_wasb.py @@ -37,7 +37,7 @@ class TestLocalFilesystemToWasbOperator: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = 
DAG("test_dag_id", schedule=None, default_args=args) def test_init(self): operator = LocalFilesystemToWasbOperator(task_id="wasb_operator_1", dag=self.dag, **self._config) diff --git a/tests/providers/mysql/hooks/test_mysql.py b/tests/providers/mysql/hooks/test_mysql.py index e6e8bd6ca5fce..cb6005ca8cf0c 100644 --- a/tests/providers/mysql/hooks/test_mysql.py +++ b/tests/providers/mysql/hooks/test_mysql.py @@ -335,7 +335,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): class TestMySql: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag def teardown_method(self): diff --git a/tests/providers/mysql/operators/test_mysql.py b/tests/providers/mysql/operators/test_mysql.py index 719d37024c683..10a1fcc151a82 100644 --- a/tests/providers/mysql/operators/test_mysql.py +++ b/tests/providers/mysql/operators/test_mysql.py @@ -47,7 +47,7 @@ class TestMySql: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag def teardown_method(self): @@ -105,7 +105,12 @@ def test_mysql_operator_resolve_parameters_template_json_file(self, tmp_path): path = tmp_path / "testfile.json" path.write_text('{\n "foo": "{{ ds }}"}') - with DAG("test-dag", start_date=DEFAULT_DATE, template_searchpath=os.fspath(path.parent)): + with DAG( + dag_id="test-dag", + schedule=None, + start_date=DEFAULT_DATE, + template_searchpath=os.fspath(path.parent), + ): task = SQLExecuteQueryOperator( task_id="op1", parameters=path.name, sql="SELECT 1", conn_id=MYSQL_DEFAULT ) diff --git a/tests/providers/mysql/transfers/test_presto_to_mysql.py b/tests/providers/mysql/transfers/test_presto_to_mysql.py index d1938f995b8fd..9af5a8097e4d1 100644 --- a/tests/providers/mysql/transfers/test_presto_to_mysql.py +++ b/tests/providers/mysql/transfers/test_presto_to_mysql.py @@ -34,7 +34,7 @@ def setup_method(self): task_id="test_presto_to_mysql_transfer", ) args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_presto_to_mysql_transfer", default_args=args) + self.dag = DAG("test_presto_to_mysql_transfer", schedule=None, default_args=args) @patch("airflow.providers.mysql.transfers.presto_to_mysql.MySqlHook") @patch("airflow.providers.mysql.transfers.presto_to_mysql.PrestoHook") diff --git a/tests/providers/mysql/transfers/test_trino_to_mysql.py b/tests/providers/mysql/transfers/test_trino_to_mysql.py index 390c84729b2b8..612207c329f64 100644 --- a/tests/providers/mysql/transfers/test_trino_to_mysql.py +++ b/tests/providers/mysql/transfers/test_trino_to_mysql.py @@ -37,7 +37,7 @@ def setup_method(self): task_id="test_trino_to_mysql_transfer", ) args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_trino_to_mysql_transfer", default_args=args) + self.dag = DAG("test_trino_to_mysql_transfer", schedule=None, default_args=args) @patch("airflow.providers.mysql.transfers.trino_to_mysql.MySqlHook") @patch("airflow.providers.mysql.transfers.trino_to_mysql.TrinoHook") diff --git a/tests/providers/mysql/transfers/test_vertica_to_mysql.py b/tests/providers/mysql/transfers/test_vertica_to_mysql.py index 82997a46f42ae..7656a036449f8 100644 --- a/tests/providers/mysql/transfers/test_vertica_to_mysql.py +++ b/tests/providers/mysql/transfers/test_vertica_to_mysql.py @@ -48,7 +48,7 @@ def mock_get_conn(): class TestVerticaToMySqlTransfer: 
     def setup_method(self):
         args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)}
-        self.dag = DAG("test_dag_id", default_args=args)
+        self.dag = DAG("test_dag_id", schedule=None, default_args=args)

     @mock.patch(
         "airflow.providers.mysql.transfers.vertica_to_mysql.VerticaHook.get_conn", side_effect=mock_get_conn
diff --git a/tests/providers/openlineage/plugins/test_adapter.py b/tests/providers/openlineage/plugins/test_adapter.py
index 18d7c0c8d9302..22e1da4e1f6ab 100644
--- a/tests/providers/openlineage/plugins/test_adapter.py
+++ b/tests/providers/openlineage/plugins/test_adapter.py
@@ -540,7 +540,12 @@ def test_emit_dag_started_event(mock_stats_incr, mock_stats_timer, generate_stat
     dag_id = "dag_id"
     run_id = str(uuid.uuid4())

-    with DAG(dag_id=dag_id, description="dag desc", start_date=datetime.datetime(2024, 6, 1)) as dag:
+    with DAG(
+        dag_id=dag_id,
+        schedule=datetime.timedelta(days=1),
+        start_date=datetime.datetime(2024, 6, 1),
+        description="dag desc",
+    ) as dag:
         tg = TaskGroup(group_id="tg1")
         tg2 = TaskGroup(group_id="tg2", parent_group=tg)
         task_0 = BashOperator(task_id="task_0", bash_command="exit 0;")  # noqa: F841
@@ -648,7 +653,7 @@ def test_emit_dag_complete_event(
     dag_id = "dag_id"
     run_id = str(uuid.uuid4())

-    with DAG(dag_id=dag_id, start_date=datetime.datetime(2024, 6, 1)):
+    with DAG(dag_id=dag_id, schedule=None, start_date=datetime.datetime(2024, 6, 1)):
         task_0 = BashOperator(task_id="task_0", bash_command="exit 0;")
         task_1 = BashOperator(task_id="task_1", bash_command="exit 0;")
         task_2 = EmptyOperator(
@@ -730,7 +735,7 @@ def test_emit_dag_failed_event(
     dag_id = "dag_id"
     run_id = str(uuid.uuid4())

-    with DAG(dag_id=dag_id, start_date=datetime.datetime(2024, 6, 1)):
+    with DAG(dag_id=dag_id, schedule=None, start_date=datetime.datetime(2024, 6, 1)):
         task_0 = BashOperator(task_id="task_0", bash_command="exit 0;")
         task_1 = BashOperator(task_id="task_1", bash_command="exit 0;")
         task_2 = EmptyOperator(task_id="task_2.test")
diff --git a/tests/providers/openlineage/plugins/test_listener.py b/tests/providers/openlineage/plugins/test_listener.py
index b7004ac3929ed..eed3a0ea0efd9 100644
--- a/tests/providers/openlineage/plugins/test_listener.py
+++ b/tests/providers/openlineage/plugins/test_listener.py
@@ -73,6 +73,7 @@ def test_listener_does_not_change_task_instance(render_mock, xcom_push_mock):
     dag = DAG(
         "test",
+        schedule=None,
         start_date=dt.datetime(2022, 1, 1),
         user_defined_macros={"render_df": render_df},
         params={"df": {"col": [1, 2]}},
@@ -144,6 +145,7 @@ def sample_callable(**kwargs):
     """
     dag = DAG(
         f"test_{scenario_name}",
+        schedule=None,
         start_date=dt.datetime(2022, 1, 1),
     )
     t = PythonOperator(task_id=f"test_task_{scenario_name}", dag=dag, python_callable=python_callable)
@@ -590,6 +592,7 @@ class TestOpenLineageSelectiveEnable:
     def setup_method(self):
         self.dag = DAG(
             "test_selective_enable",
+            schedule=None,
             start_date=dt.datetime(2022, 1, 1),
         )
diff --git a/tests/providers/openlineage/utils/test_selective_enable.py b/tests/providers/openlineage/utils/test_selective_enable.py
index d44839c5b97a3..e950a4f29d822 100644
--- a/tests/providers/openlineage/utils/test_selective_enable.py
+++ b/tests/providers/openlineage/utils/test_selective_enable.py
@@ -33,7 +33,7 @@ class TestOpenLineageSelectiveEnable:
     def setup_method(self):
-        @dag(dag_id="test_selective_enable_decorated_dag", start_date=now())
+        @dag(dag_id="test_selective_enable_decorated_dag", schedule=None, start_date=now())
         def decorated_dag():
             @task
             def decorated_task():
@@ -43,7 +43,7 @@ def decorated_task():
         self.decorated_dag = decorated_dag()

-        with DAG(dag_id="test_selective_enable_dag", start_date=now()) as self.dag:
+        with DAG(dag_id="test_selective_enable_dag", schedule=None, start_date=now()) as self.dag:
             self.task = EmptyOperator(task_id="test_selective_enable")

     def test_enable_lineage_task_level(self):
diff --git a/tests/providers/openlineage/utils/test_utils.py b/tests/providers/openlineage/utils/test_utils.py
index 0b6d5c720ffe6..870cd363746c7 100644
--- a/tests/providers/openlineage/utils/test_utils.py
+++ b/tests/providers/openlineage/utils/test_utils.py
@@ -58,7 +58,7 @@ class CustomOperatorFromEmpty(EmptyOperator):


 def test_get_airflow_job_facet():
-    with DAG(dag_id="dag", start_date=datetime.datetime(2024, 6, 1)) as dag:
+    with DAG(dag_id="dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)) as dag:
         task_0 = BashOperator(task_id="task_0", bash_command="exit 0;")

         with TaskGroup("section_1", prefix_group_id=True):
@@ -215,7 +215,7 @@ def test_get_operator_class_mapped_operator():


 def test_get_tasks_details():
-    with DAG(dag_id="dag", start_date=datetime.datetime(2024, 6, 1)) as dag:
+    with DAG(dag_id="dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)) as dag:
         task = CustomOperatorForTest(task_id="task", bash_command="exit 0;")  # noqa: F841
         task_0 = BashOperator(task_id="task_0", bash_command="exit 0;")  # noqa: F841
         task_1 = CustomOperatorFromEmpty(task_id="task_1")  # noqa: F841
@@ -339,7 +339,7 @@ def test_get_tasks_details():


 def test_get_tasks_details_empty_dag():
-    assert _get_tasks_details(DAG("test_dag", start_date=datetime.datetime(2024, 6, 1))) == {}
+    assert _get_tasks_details(DAG("test_dag", schedule=None, start_date=datetime.datetime(2024, 6, 1))) == {}


 def test_dag_tree_level_indent():
@@ -350,7 +350,7 @@ def test_dag_tree_level_indent():
     subsequent level in the DAG.

     The test asserts that the generated tree view matches the expected lines with correct indentation.
""" - with DAG(dag_id="dag", start_date=datetime.datetime(2024, 6, 1)) as dag: + with DAG(dag_id="dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)) as dag: task_0 = EmptyOperator(task_id="task_0") task_1 = EmptyOperator(task_id="task_1") task_2 = EmptyOperator(task_id="task_2") @@ -391,7 +391,7 @@ def process_item(item: int) -> int: def sum_values(values: list[int]) -> int: return sum(values) - with DAG(dag_id="dag", start_date=datetime.datetime(2024, 6, 1)) as dag: + with DAG(dag_id="dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)) as dag: task_ = BashOperator(task_id="task", bash_command="exit 0;") task_0 = BashOperator(task_id="task_0", bash_command="exit 0;") task_1 = BashOperator(task_id="task_1", bash_command="exit 1;") @@ -463,11 +463,16 @@ def sum_values(values: list[int]) -> int: def test_get_dag_tree_empty_dag(): - assert _get_parsed_dag_tree(DAG("test_dag", start_date=datetime.datetime(2024, 6, 1))) == {} + assert ( + _get_parsed_dag_tree( + DAG("test_dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)), + ) + == {} + ) def test_get_task_groups_details(): - with DAG("test_dag", start_date=datetime.datetime(2024, 6, 1)) as dag: + with DAG("test_dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)) as dag: with TaskGroup("tg1", prefix_group_id=True): task_1 = EmptyOperator(task_id="task_1") # noqa: F841 with TaskGroup("tg2", prefix_group_id=False): @@ -504,7 +509,7 @@ def test_get_task_groups_details(): def test_get_task_groups_details_nested(): - with DAG("test_dag", start_date=datetime.datetime(2024, 6, 1)) as dag: + with DAG("test_dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)) as dag: with TaskGroup("tg1", prefix_group_id=True) as tg: with TaskGroup("tg2", parent_group=tg) as tg2: with TaskGroup("tg3", parent_group=tg2): @@ -539,14 +544,19 @@ def test_get_task_groups_details_nested(): def test_get_task_groups_details_no_task_groups(): - assert _get_task_groups_details(DAG("test_dag", start_date=datetime.datetime(2024, 6, 1))) == {} + assert ( + _get_task_groups_details( + DAG("test_dag", schedule=None, start_date=datetime.datetime(2024, 6, 1)), + ) + == {} + ) @patch("airflow.providers.openlineage.conf.custom_run_facets", return_value=set()) def test_get_user_provided_run_facets_with_no_function_definition(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) @@ -561,7 +571,7 @@ def test_get_user_provided_run_facets_with_no_function_definition(mock_custom_fa def test_get_user_provided_run_facets_with_function_definition(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) @@ -582,7 +592,7 @@ def test_get_user_provided_run_facets_with_return_value_as_none(mock_custom_face task=BashOperator( task_id="test-task", bash_command="exit 0;", - dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)), + dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)), ), state="running", ) @@ -602,7 +612,7 @@ def test_get_user_provided_run_facets_with_return_value_as_none(mock_custom_face def 
test_get_user_provided_run_facets_with_multiple_function_definition(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) @@ -623,7 +633,7 @@ def test_get_user_provided_run_facets_with_multiple_function_definition(mock_cus def test_get_user_provided_run_facets_with_duplicate_facet_keys(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) @@ -640,7 +650,7 @@ def test_get_user_provided_run_facets_with_duplicate_facet_keys(mock_custom_face def test_get_user_provided_run_facets_with_invalid_function_definition(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) @@ -655,7 +665,7 @@ def test_get_user_provided_run_facets_with_invalid_function_definition(mock_cust def test_get_user_provided_run_facets_with_wrong_return_type_function(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) @@ -670,7 +680,7 @@ def test_get_user_provided_run_facets_with_wrong_return_type_function(mock_custo def test_get_user_provided_run_facets_with_exception(mock_custom_facet_funcs): sample_ti = TaskInstance( task=EmptyOperator( - task_id="test-task", dag=DAG("test-dag", start_date=datetime.datetime(2024, 7, 1)) + task_id="test-task", dag=DAG("test-dag", schedule=None, start_date=datetime.datetime(2024, 7, 1)) ), state="running", ) diff --git a/tests/providers/opsgenie/operators/test_opsgenie.py b/tests/providers/opsgenie/operators/test_opsgenie.py index 0194660323f7f..33a1766025979 100644 --- a/tests/providers/opsgenie/operators/test_opsgenie.py +++ b/tests/providers/opsgenie/operators/test_opsgenie.py @@ -79,7 +79,7 @@ class TestOpsgenieCreateAlertOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_build_opsgenie_payload(self): # Given / When @@ -120,7 +120,7 @@ class TestOpsgenieCloseAlertOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) def test_build_opsgenie_payload(self): # Given / When @@ -147,7 +147,7 @@ def test_properties(self): class TestOpsgenieDeleteAlertOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) @mock.patch("airflow.providers.opsgenie.operators.opsgenie.OpsgenieAlertHook") def test_operator(self, mock_opsgenie_hook): diff --git a/tests/providers/pinecone/operators/test_pinecone.py b/tests/providers/pinecone/operators/test_pinecone.py index 
20bbcc2e7d3d7..dcc1c6067ea66 100644 --- a/tests/providers/pinecone/operators/test_pinecone.py +++ b/tests/providers/pinecone/operators/test_pinecone.py @@ -39,7 +39,7 @@ def upsert(*args, **kwargs): @pytest.fixture def dummy_dag(): """Fixture to provide a dummy Airflow DAG for testing.""" - return DAG(dag_id="test_dag", start_date=datetime(2023, 9, 29)) + return DAG(dag_id="test_dag", schedule=None, start_date=datetime(2023, 9, 29)) class TestPineconeVectorIngestOperator: diff --git a/tests/providers/postgres/operators/test_postgres.py b/tests/providers/postgres/operators/test_postgres.py index 0bc34a519549c..b7bab1392339e 100644 --- a/tests/providers/postgres/operators/test_postgres.py +++ b/tests/providers/postgres/operators/test_postgres.py @@ -35,7 +35,7 @@ class TestPostgres: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag def teardown_method(self): @@ -136,7 +136,7 @@ class TestPostgresOpenLineage: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag with PostgresHook().get_conn() as conn: diff --git a/tests/providers/redis/log/test_redis_task_handler.py b/tests/providers/redis/log/test_redis_task_handler.py index 4e570cccbc4e6..f4ded2fa586ae 100644 --- a/tests/providers/redis/log/test_redis_task_handler.py +++ b/tests/providers/redis/log/test_redis_task_handler.py @@ -37,7 +37,7 @@ class TestRedisTaskHandler: @pytest.fixture def ti(self): date = datetime(2020, 1, 1) - dag = DAG(dag_id="dag_for_testing_redis_task_handler", start_date=date) + dag = DAG(dag_id="dag_for_testing_redis_task_handler", schedule=None, start_date=date) task = EmptyOperator(task_id="task_for_testing_redis_log_handler", dag=dag) dag_run = DagRun(dag_id=dag.dag_id, execution_date=date, run_id="test", run_type="scheduled") diff --git a/tests/providers/redis/operators/test_redis_publish.py b/tests/providers/redis/operators/test_redis_publish.py index ef44dbba82489..cb3d16144c1e8 100644 --- a/tests/providers/redis/operators/test_redis_publish.py +++ b/tests/providers/redis/operators/test_redis_publish.py @@ -30,7 +30,7 @@ class TestRedisPublishOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.mock_context = MagicMock() diff --git a/tests/providers/redis/sensors/test_redis_key.py b/tests/providers/redis/sensors/test_redis_key.py index 02804e450cc02..7012ed5ccf2e1 100644 --- a/tests/providers/redis/sensors/test_redis_key.py +++ b/tests/providers/redis/sensors/test_redis_key.py @@ -30,7 +30,7 @@ class TestRedisPublishOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.mock_context = MagicMock() diff --git a/tests/providers/redis/sensors/test_redis_pub_sub.py b/tests/providers/redis/sensors/test_redis_pub_sub.py index f773e3bd51730..31c0a2f6072c5 100644 --- a/tests/providers/redis/sensors/test_redis_pub_sub.py +++ b/tests/providers/redis/sensors/test_redis_pub_sub.py @@ -30,7 +30,7 @@ class TestRedisPubSubSensor: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = 
DAG("test_dag_id", default_args=args) + self.dag = DAG("test_dag_id", schedule=None, default_args=args) self.mock_context = MagicMock() diff --git a/tests/providers/sftp/operators/test_sftp.py b/tests/providers/sftp/operators/test_sftp.py index d87ae26b3abda..a6835675da26f 100644 --- a/tests/providers/sftp/operators/test_sftp.py +++ b/tests/providers/sftp/operators/test_sftp.py @@ -324,7 +324,11 @@ def test_file_transfer_with_intermediate_dir_error_get(self, dag_maker, create_r @mock.patch.dict("os.environ", {"AIRFLOW_CONN_" + TEST_CONN_ID.upper(): "ssh://test_id@localhost"}) def test_arg_checking(self): - dag = DAG(dag_id="unit_tests_sftp_op_arg_checking", default_args={"start_date": DEFAULT_DATE}) + dag = DAG( + dag_id="unit_tests_sftp_op_arg_checking", + schedule=None, + default_args={"start_date": DEFAULT_DATE}, + ) # Exception should be raised if neither ssh_hook nor ssh_conn_id is provided task_0 = SFTPOperator( task_id="test_sftp_0", @@ -528,7 +532,7 @@ def test_extract_ssh_conn_id(self, get_connection, get_conn, operation, expected task = SFTPOperator( task_id=task_id, ssh_conn_id="sftp_conn_id", - dag=DAG(dag_id), + dag=DAG(dag_id, schedule=None), start_date=timezone.utcnow(), local_filepath="/path/local", remote_filepath="/path/remote", @@ -559,7 +563,7 @@ def test_extract_sftp_hook(self, get_connection, get_conn, operation, expected): task = SFTPOperator( task_id=task_id, sftp_hook=SFTPHook(ssh_conn_id="sftp_conn_id"), - dag=DAG(dag_id), + dag=DAG(dag_id, schedule=None), start_date=timezone.utcnow(), local_filepath="/path/local", remote_filepath="/path/remote", @@ -590,7 +594,7 @@ def test_extract_ssh_hook(self, get_connection, get_conn, operation, expected): task = SFTPOperator( task_id=task_id, ssh_hook=SSHHook(ssh_conn_id="sftp_conn_id"), - dag=DAG(dag_id), + dag=DAG(dag_id, schedule=None), start_date=timezone.utcnow(), local_filepath="/path/local", remote_filepath="/path/remote", diff --git a/tests/providers/slack/transfers/test_sql_to_slack_webhook.py b/tests/providers/slack/transfers/test_sql_to_slack_webhook.py index 3c71fab26a617..2f6ef63bc687f 100644 --- a/tests/providers/slack/transfers/test_sql_to_slack_webhook.py +++ b/tests/providers/slack/transfers/test_sql_to_slack_webhook.py @@ -43,7 +43,7 @@ def mocked_hook(): @pytest.mark.db_test class TestSqlToSlackWebhookOperator: def setup_method(self): - self.example_dag = DAG(TEST_DAG_ID, start_date=DEFAULT_DATE) + self.example_dag = DAG(TEST_DAG_ID, schedule=None, start_date=DEFAULT_DATE) self.default_hook_parameters = {"timeout": None, "proxy": None, "retry_handlers": None} @staticmethod diff --git a/tests/providers/snowflake/operators/test_snowflake.py b/tests/providers/snowflake/operators/test_snowflake.py index e24e8ca9db6f7..3ab6ab5f8895d 100644 --- a/tests/providers/snowflake/operators/test_snowflake.py +++ b/tests/providers/snowflake/operators/test_snowflake.py @@ -58,7 +58,7 @@ class TestSnowflakeOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag @mock.patch("airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator.get_db_hook") @@ -174,7 +174,7 @@ def test_overwrite_params( def create_context(task, dag=None): if dag is None: - dag = DAG(dag_id="dag") + dag = DAG(dag_id="dag", schedule=None) tzinfo = pendulum.timezone("UTC") execution_date = timezone.datetime(2022, 1, 1, 1, 0, 0, tzinfo=tzinfo) dag_run = DagRun( diff --git 
a/tests/providers/sqlite/operators/test_sqlite.py b/tests/providers/sqlite/operators/test_sqlite.py index 79916ce2761f7..7b95c5d932a58 100644 --- a/tests/providers/sqlite/operators/test_sqlite.py +++ b/tests/providers/sqlite/operators/test_sqlite.py @@ -33,7 +33,7 @@ class TestSqliteOperator: def setup_method(self): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=args) + dag = DAG(TEST_DAG_ID, schedule=None, default_args=args) self.dag = dag def teardown_method(self): diff --git a/tests/sensors/test_bash.py b/tests/sensors/test_bash.py index 71cc2a5da3d78..3282f6b971221 100644 --- a/tests/sensors/test_bash.py +++ b/tests/sensors/test_bash.py @@ -29,7 +29,7 @@ class TestBashSensor: def setup_method(self): args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)} - dag = DAG("test_dag_id", default_args=args) + dag = DAG("test_dag_id", schedule=None, default_args=args) self.dag = dag def test_true_condition(self): diff --git a/tests/sensors/test_date_time.py b/tests/sensors/test_date_time.py index a298300c54f32..edfd8f64aeb28 100644 --- a/tests/sensors/test_date_time.py +++ b/tests/sensors/test_date_time.py @@ -32,7 +32,7 @@ class TestDateTimeSensor: @classmethod def setup_class(cls): args = {"owner": "airflow", "start_date": DEFAULT_DATE} - cls.dag = DAG("test_dag", default_args=args) + cls.dag = DAG("test_dag", schedule=None, default_args=args) @pytest.mark.parametrize( "task_id, target_time, expected", diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 66d9766d8187c..30e2b29e8536c 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -107,7 +107,7 @@ class TestExternalTaskSensor: def setup_method(self): self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=True) self.args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG(TEST_DAG_ID, default_args=self.args) + self.dag = DAG(TEST_DAG_ID, schedule=None, default_args=self.args) self.dag_run_id = DagRunType.MANUAL.generate_run_id(DEFAULT_DATE) def add_time_sensor(self, task_id=TEST_TASK_ID): @@ -947,19 +947,19 @@ def test_fail_poke( ( (None, None, {}, f"The external DAG {TEST_DAG_ID} does not exist."), ( - DAG(dag_id="test"), + DAG(dag_id="test", schedule=None), False, {}, f"The external DAG {TEST_DAG_ID} was deleted.", ), ( - DAG(dag_id="test"), + DAG(dag_id="test", schedule=None), True, {"external_task_ids": [TEST_TASK_ID, TEST_TASK_ID_ALTERNATE]}, f"The external task {TEST_TASK_ID} in DAG {TEST_DAG_ID} does not exist.", ), ( - DAG(dag_id="test"), + DAG(dag_id="test", schedule=None), True, {"external_task_group_id": [TEST_TASK_ID, TEST_TASK_ID_ALTERNATE]}, f"The external task group '{re.escape(str([TEST_TASK_ID, TEST_TASK_ID_ALTERNATE]))}'" @@ -1138,7 +1138,7 @@ def test_serialized_fields(self): assert {"recursion_depth"}.issubset(ExternalTaskMarker.get_serialized_fields()) def test_serialized_external_task_marker(self): - dag = DAG("test_serialized_external_task_marker", start_date=DEFAULT_DATE) + dag = DAG("test_serialized_external_task_marker", schedule=None, start_date=DEFAULT_DATE) task = ExternalTaskMarker( task_id="parent_task", external_dag_id="external_task_marker_child", diff --git a/tests/sensors/test_filesystem.py b/tests/sensors/test_filesystem.py index 812270c60e991..1fb123cfe7248 100644 --- a/tests/sensors/test_filesystem.py +++ b/tests/sensors/test_filesystem.py @@ -20,6 +20,7 @@ import os import shutil import tempfile +from datetime 
import timedelta import pytest @@ -43,7 +44,7 @@ def setup_method(self): hook = FSHook() args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID + "test_schedule_dag_once", default_args=args) + dag = DAG(TEST_DAG_ID + "test_schedule_dag_once", schedule=timedelta(days=1), default_args=args) self.hook = hook self.dag = dag diff --git a/tests/sensors/test_time_delta.py b/tests/sensors/test_time_delta.py index 4d8369b783474..b437937df205d 100644 --- a/tests/sensors/test_time_delta.py +++ b/tests/sensors/test_time_delta.py @@ -40,7 +40,7 @@ class TestTimedeltaSensor: def setup_method(self): self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=True) self.args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG(TEST_DAG_ID, default_args=self.args) + self.dag = DAG(TEST_DAG_ID, schedule=timedelta(days=1), default_args=self.args) @pytest.mark.skip_if_database_isolation_mode # Test is broken in db isolation mode def test_timedelta_sensor(self): @@ -52,7 +52,7 @@ class TestTimeDeltaSensorAsync: def setup_method(self): self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=True) self.args = {"owner": "airflow", "start_date": DEFAULT_DATE} - self.dag = DAG(TEST_DAG_ID, default_args=self.args) + self.dag = DAG(TEST_DAG_ID, schedule=timedelta(days=1), default_args=self.args) @pytest.mark.parametrize( "should_defer", diff --git a/tests/sensors/test_time_sensor.py b/tests/sensors/test_time_sensor.py index d26fc7bf39005..7919346a61d34 100644 --- a/tests/sensors/test_time_sensor.py +++ b/tests/sensors/test_time_sensor.py @@ -46,7 +46,7 @@ class TestTimeSensor: @time_machine.travel(timezone.datetime(2020, 1, 1, 23, 0).replace(tzinfo=timezone.utc)) def test_timezone(self, default_timezone, start_date, expected, monkeypatch): monkeypatch.setattr("airflow.settings.TIMEZONE", timezone.parse_timezone(default_timezone)) - dag = DAG("test", default_args={"start_date": start_date}) + dag = DAG("test", schedule=None, default_args={"start_date": start_date}) op = TimeSensor(task_id="test", target_time=time(10, 0), dag=dag) assert op.poke(None) == expected @@ -54,7 +54,11 @@ def test_timezone(self, default_timezone, start_date, expected, monkeypatch): class TestTimeSensorAsync: @time_machine.travel("2020-07-07 00:00:00", tick=False) def test_task_is_deferred(self): - with DAG("test_task_is_deferred", start_date=timezone.datetime(2020, 1, 1, 23, 0)): + with DAG( + dag_id="test_task_is_deferred", + schedule=None, + start_date=timezone.datetime(2020, 1, 1, 23, 0), + ): op = TimeSensorAsync(task_id="test", target_time=time(10, 0)) assert not timezone.is_naive(op.target_datetime) @@ -67,7 +71,7 @@ def test_task_is_deferred(self): assert exc_info.value.method_name == "execute_complete" def test_target_time_aware(self): - with DAG("test_target_time_aware", start_date=timezone.datetime(2020, 1, 1, 23, 0)): + with DAG("test_target_time_aware", schedule=None, start_date=timezone.datetime(2020, 1, 1, 23, 0)): aware_time = time(0, 1).replace(tzinfo=pendulum.local_timezone()) op = TimeSensorAsync(task_id="test", target_time=aware_time) assert op.target_datetime.tzinfo == timezone.utc @@ -77,7 +81,8 @@ def test_target_time_naive_dag_timezone(self): Tests that naive target_time gets converted correctly using the DAG's timezone. 
""" with DAG( - "test_target_time_naive_dag_timezone", + dag_id="test_target_time_naive_dag_timezone", + schedule=None, start_date=pendulum.datetime(2020, 1, 1, 0, 0, tz=DEFAULT_TIMEZONE), ): op = TimeSensorAsync(task_id="test", target_time=pendulum.time(9, 0)) diff --git a/tests/sensors/test_weekday_sensor.py b/tests/sensors/test_weekday_sensor.py index eaa2fa4ee661c..99c4e97c64805 100644 --- a/tests/sensors/test_weekday_sensor.py +++ b/tests/sensors/test_weekday_sensor.py @@ -17,6 +17,8 @@ # under the License. from __future__ import annotations +from datetime import timedelta + import pytest from airflow.exceptions import AirflowSensorTimeout @@ -62,7 +64,7 @@ def setup_method(self): self.clean_db() self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=True) self.args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(TEST_DAG_ID, default_args=self.args) + dag = DAG(TEST_DAG_ID, schedule=timedelta(days=1), default_args=self.args) self.dag = dag def teardwon_method(self): diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index 8ef09349275ea..5e45351f3dbeb 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -274,6 +274,7 @@ def make_simple_dag(): """Make very simple DAG to verify serialization result.""" with DAG( dag_id="simple_dag", + schedule=timedelta(days=1), default_args={ "retries": 1, "retry_delay": timedelta(minutes=5), @@ -313,6 +314,7 @@ def compute_next_execution_date(dag, execution_date): default_args = {"start_date": datetime(2019, 7, 10)} dag = DAG( "user_defined_macro_filter_dag", + schedule=None, default_args=default_args, user_defined_macros={ "next_execution_date": compute_next_execution_date, @@ -737,7 +739,7 @@ def validate_deserialized_task( ], ) def test_deserialization_start_date(self, dag_start_date, task_start_date, expected_task_start_date): - dag = DAG(dag_id="simple_dag", start_date=dag_start_date) + dag = DAG(dag_id="simple_dag", schedule=None, start_date=dag_start_date) BaseOperator(task_id="simple_task", dag=dag, start_date=task_start_date) serialized_dag = SerializedDAG.to_dict(dag) @@ -753,7 +755,11 @@ def test_deserialization_start_date(self, dag_start_date, task_start_date, expec assert simple_task.start_date == expected_task_start_date def test_deserialization_with_dag_context(self): - with DAG(dag_id="simple_dag", start_date=datetime(2019, 8, 1, tzinfo=timezone.utc)) as dag: + with DAG( + dag_id="simple_dag", + schedule=None, + start_date=datetime(2019, 8, 1, tzinfo=timezone.utc), + ) as dag: BaseOperator(task_id="simple_task") # should not raise RuntimeError: dictionary changed size during iteration SerializedDAG.to_dict(dag) @@ -775,7 +781,7 @@ def test_deserialization_with_dag_context(self): ], ) def test_deserialization_end_date(self, dag_end_date, task_end_date, expected_task_end_date): - dag = DAG(dag_id="simple_dag", start_date=datetime(2019, 8, 1), end_date=dag_end_date) + dag = DAG(dag_id="simple_dag", schedule=None, start_date=datetime(2019, 8, 1), end_date=dag_end_date) BaseOperator(task_id="simple_task", dag=dag, end_date=task_end_date) serialized_dag = SerializedDAG.to_dict(dag) @@ -926,9 +932,9 @@ def test_dag_params_roundtrip(self, val, expected_val): RemovedInAirflow3Warning, match="The use of non-json-serializable params is deprecated and will be removed in a future release", ): - dag = DAG(dag_id="simple_dag", params=val) + dag = DAG(dag_id="simple_dag", schedule=None, params=val) else: - dag = 
DAG(dag_id="simple_dag", params=val) + dag = DAG(dag_id="simple_dag", schedule=None, params=val) BaseOperator(task_id="simple_task", dag=dag, start_date=datetime(2019, 8, 1)) serialized_dag_json = SerializedDAG.to_json(dag) @@ -960,12 +966,12 @@ def __init__(self, path: str): schema = {"type": "string", "pattern": r"s3:\/\/(.+?)\/(.+)"} super().__init__(default=path, schema=schema) - dag = DAG(dag_id="simple_dag", params={"path": S3Param("s3://my_bucket/my_path")}) + dag = DAG(dag_id="simple_dag", schedule=None, params={"path": S3Param("s3://my_bucket/my_path")}) with pytest.raises(SerializationError): SerializedDAG.to_dict(dag) - dag = DAG(dag_id="simple_dag") + dag = DAG(dag_id="simple_dag", schedule=None) BaseOperator( task_id="simple_task", dag=dag, @@ -1012,7 +1018,7 @@ def test_task_params_roundtrip(self, val, expected_val): """ Test that params work both on Serialized DAGs & Tasks """ - dag = DAG(dag_id="simple_dag") + dag = DAG(dag_id="simple_dag", schedule=None) if val and any([True for k, v in val.items() if isinstance(v, set)]): with pytest.warns( RemovedInAirflow3Warning, @@ -1139,7 +1145,7 @@ class MyOperator(BaseOperator): def execute(self, context: Context): pass - with DAG(dag_id="simple_dag", start_date=datetime(2019, 8, 1)) as dag: + with DAG(dag_id="simple_dag", schedule=None, start_date=datetime(2019, 8, 1)) as dag: MyOperator(task_id="blah") serialized_dag = SerializedDAG.to_dict(dag) @@ -1224,7 +1230,7 @@ def test_templated_fields_exist_in_serialized_dag(self, templated_field, expecte we want check that non-"basic" objects are turned in to strings after deserializing. """ - dag = DAG("test_serialized_template_fields", start_date=datetime(2019, 8, 1)) + dag = DAG("test_serialized_template_fields", schedule=None, start_date=datetime(2019, 8, 1)) with dag: BashOperator(task_id="test", bash_command=templated_field) @@ -1371,7 +1377,7 @@ def test_task_resources(self): execution_date = datetime(2020, 1, 1) task_id = "task1" - with DAG("test_task_resources", start_date=execution_date) as dag: + with DAG("test_task_resources", schedule=None, start_date=execution_date) as dag: task = EmptyOperator(task_id=task_id, resources={"cpus": 0.1, "ram": 2048}) SerializedDAG.validate_schema(SerializedDAG.to_dict(dag)) @@ -1387,7 +1393,7 @@ def test_task_group_serialization(self): """ execution_date = datetime(2020, 1, 1) - with DAG("test_task_group_serialization", start_date=execution_date) as dag: + with DAG("test_task_group_serialization", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") with TaskGroup("group234") as group234: _ = EmptyOperator(task_id="task2") @@ -1443,7 +1449,7 @@ def test_setup_teardown_tasks(self): """ execution_date = datetime(2020, 1, 1) - with DAG("test_task_group_setup_teardown_tasks", start_date=execution_date) as dag: + with DAG("test_task_group_setup_teardown_tasks", schedule=None, start_date=execution_date) as dag: EmptyOperator(task_id="setup").as_setup() EmptyOperator(task_id="teardown").as_teardown() @@ -1547,7 +1553,7 @@ def test_deps_sorted(self): from airflow.sensors.external_task import ExternalTaskSensor execution_date = datetime(2020, 1, 1) - with DAG(dag_id="test_deps_sorted", start_date=execution_date) as dag: + with DAG(dag_id="test_deps_sorted", schedule=None, start_date=execution_date) as dag: task1 = ExternalTaskSensor( task_id="task1", external_dag_id="external_dag_id", @@ -1577,7 +1583,11 @@ class DummyTask(BaseOperator): deps = frozenset([*BaseOperator.deps, DummyTriggerRule()]) execution_date = 
datetime(2020, 1, 1) - with DAG(dag_id="test_error_on_unregistered_ti_dep_serialization", start_date=execution_date) as dag: + with DAG( + dag_id="test_error_on_unregistered_ti_dep_serialization", + schedule=None, + start_date=execution_date, + ) as dag: DummyTask(task_id="task1") with pytest.raises(SerializationError): @@ -1586,7 +1596,11 @@ class DummyTask(BaseOperator): def test_error_on_unregistered_ti_dep_deserialization(self): from airflow.operators.empty import EmptyOperator - with DAG("test_error_on_unregistered_ti_dep_deserialization", start_date=datetime(2019, 8, 1)) as dag: + with DAG( + "test_error_on_unregistered_ti_dep_deserialization", + schedule=None, + start_date=datetime(2019, 8, 1), + ) as dag: EmptyOperator(task_id="task1") serialize_op = SerializedBaseOperator.serialize_operator(dag.task_dict["task1"]) serialize_op["deps"] = [ @@ -1605,7 +1619,7 @@ class DummyTask(BaseOperator): deps = frozenset([*BaseOperator.deps, CustomTestTriggerRule()]) execution_date = datetime(2020, 1, 1) - with DAG(dag_id="test_serialize_custom_ti_deps", start_date=execution_date) as dag: + with DAG(dag_id="test_serialize_custom_ti_deps", schedule=None, start_date=execution_date) as dag: DummyTask(task_id="task1") serialize_op = SerializedBaseOperator.serialize_operator(dag.task_dict["task1"]) @@ -1630,7 +1644,7 @@ class DummyTask(BaseOperator): ] def test_serialize_mapped_outlets(self): - with DAG(dag_id="d", start_date=datetime.now()): + with DAG(dag_id="d", schedule=None, start_date=datetime.now()): op = MockOperator.partial(task_id="x").expand(arg1=[1, 2]) assert op.inlets == [] @@ -1658,7 +1672,7 @@ class DerivedSensor(ExternalTaskSensor): execution_date = datetime(2020, 1, 1) for class_ in [ExternalTaskSensor, DerivedSensor]: - with DAG(dag_id="test_derived_dag_deps_sensor", start_date=execution_date) as dag: + with DAG(dag_id="test_derived_dag_deps_sensor", schedule=None, start_date=execution_date) as dag: task1 = class_( task_id="task1", external_dag_id="external_dag_id", @@ -1697,7 +1711,7 @@ def test_custom_dep_detector(self): from airflow.sensors.external_task import ExternalTaskSensor execution_date = datetime(2020, 1, 1) - with DAG(dag_id="test", start_date=execution_date) as dag: + with DAG(dag_id="test", schedule=None, start_date=execution_date) as dag: ExternalTaskSensor( task_id="task1", external_dag_id="external_dag_id", @@ -1894,7 +1908,7 @@ class DerivedOperator(TriggerDagRunOperator): execution_date = datetime(2020, 1, 1) for class_ in [TriggerDagRunOperator, DerivedOperator]: - with DAG(dag_id="test_derived_dag_deps_trigger", start_date=execution_date) as dag: + with DAG(dag_id="test_derived_dag_deps_trigger", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") task2 = class_( task_id="task2", @@ -1936,7 +1950,7 @@ def test_task_group_sorted(self): end """ execution_date = datetime(2020, 1, 1) - with DAG(dag_id="test_task_group_sorted", start_date=execution_date) as dag: + with DAG(dag_id="test_task_group_sorted", schedule=None, start_date=execution_date) as dag: start = EmptyOperator(task_id="start") with TaskGroup("task_group_up1") as task_group_up1: @@ -1990,7 +2004,7 @@ def test_edge_info_serialization(self): from airflow.operators.empty import EmptyOperator from airflow.utils.edgemodifier import Label - with DAG("test_edge_info_serialization", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test_edge_info_serialization", schedule=None, start_date=datetime(2020, 1, 1)) as dag: task1 = EmptyOperator(task_id="task1") task2 = 
EmptyOperator(task_id="task2") task1 >> Label("test label") >> task2 @@ -2052,7 +2066,11 @@ def test_dag_on_success_callback_roundtrip(self, passed_success_callback, expect When the callback is not set, has_on_success_callback should not be stored in Serialized blob and so default to False on de-serialization """ - dag = DAG(dag_id="test_dag_on_success_callback_roundtrip", **passed_success_callback) + dag = DAG( + dag_id="test_dag_on_success_callback_roundtrip", + schedule=None, + **passed_success_callback, + ) BaseOperator(task_id="simple_task", dag=dag, start_date=datetime(2019, 8, 1)) serialized_dag = SerializedDAG.to_dict(dag) @@ -2080,7 +2098,7 @@ def test_dag_on_failure_callback_roundtrip(self, passed_failure_callback, expect When the callback is not set, has_on_failure_callback should not be stored in Serialized blob and so default to False on de-serialization """ - dag = DAG(dag_id="test_dag_on_failure_callback_roundtrip", **passed_failure_callback) + dag = DAG(dag_id="test_dag_on_failure_callback_roundtrip", schedule=None, **passed_failure_callback) BaseOperator(task_id="simple_task", dag=dag, start_date=datetime(2019, 8, 1)) serialized_dag = SerializedDAG.to_dict(dag) @@ -2246,7 +2264,7 @@ class TestOperator(BaseOperator): def execute(self, context: Context): pass - dag = DAG(dag_id="test_dag", start_date=datetime(2023, 11, 9)) + dag = DAG(dag_id="test_dag", schedule=None, start_date=datetime(2023, 11, 9)) with dag: task = TestOperator( @@ -2308,7 +2326,7 @@ def __init__(self, *args, **kwargs): def execute_complete(self): pass - dag = DAG(dag_id="test_dag", start_date=datetime(2023, 11, 9)) + dag = DAG(dag_id="test_dag", schedule=None, start_date=datetime(2023, 11, 9)) with dag: TestOperator(task_id="test_task_1") @@ -2440,7 +2458,7 @@ def test_operator_expand_xcomarg_serde(): from airflow.models.xcom_arg import PlainXComArg, XComArg from airflow.serialization.serialized_objects import _XComRef - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: task1 = BaseOperator(task_id="op1") mapped = MockOperator.partial(task_id="task_2").expand(arg2=XComArg(task1)) @@ -2492,7 +2510,7 @@ def test_operator_expand_kwargs_literal_serde(strict): from airflow.models.xcom_arg import PlainXComArg, XComArg from airflow.serialization.serialized_objects import _XComRef - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: task1 = BaseOperator(task_id="op1") mapped = MockOperator.partial(task_id="task_2").expand_kwargs( [{"a": "x"}, {"a": XComArg(task1)}], @@ -2550,7 +2568,7 @@ def test_operator_expand_kwargs_xcomarg_serde(strict): from airflow.models.xcom_arg import PlainXComArg, XComArg from airflow.serialization.serialized_objects import _XComRef - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: task1 = BaseOperator(task_id="op1") mapped = MockOperator.partial(task_id="task_2").expand_kwargs(XComArg(task1), strict=strict) @@ -2613,7 +2631,7 @@ def test_operator_expand_deserialized_unmap(): @pytest.mark.db_test def test_sensor_expand_deserialized_unmap(): """Unmap a deserialized mapped sensor should be similar to deserializing a non-mapped sensor""" - dag = DAG(dag_id="hello", start_date=None) + dag = DAG(dag_id="hello", schedule=None, start_date=None) with dag: normal = BashSensor(task_id="a", bash_command=[1, 2], 
mode="reschedule") mapped = BashSensor.partial(task_id="b", mode="reschedule").expand(bash_command=[1, 2]) @@ -2637,7 +2655,7 @@ def test_task_resources_serde(): execution_date = datetime(2020, 1, 1) task_id = "task1" - with DAG("test_task_resources", start_date=execution_date) as _: + with DAG("test_task_resources", schedule=None, start_date=execution_date) as _: task = EmptyOperator(task_id=task_id, resources={"cpus": 0.1, "ram": 2048}) serialized = BaseSerialization.serialize(task) @@ -2654,7 +2672,7 @@ def test_taskflow_expand_serde(): from airflow.models.xcom_arg import XComArg from airflow.serialization.serialized_objects import _ExpandInputRef, _XComRef - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: op1 = BaseOperator(task_id="op1") @task(retry_delay=30) @@ -2757,7 +2775,7 @@ def test_taskflow_expand_kwargs_serde(strict): from airflow.models.xcom_arg import XComArg from airflow.serialization.serialized_objects import _ExpandInputRef, _XComRef - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: op1 = BaseOperator(task_id="op1") @task(retry_delay=30) @@ -2856,7 +2874,7 @@ def test_mapped_task_group_serde(): from airflow.models.expandinput import DictOfListsExpandInput from airflow.utils.task_group import MappedTaskGroup - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: @task_group def tg(a: str) -> None: @@ -2909,7 +2927,7 @@ def __init__(self, inputs, **kwargs): def operator_extra_links(self): return (AirflowLink2(),) - with DAG("test-dag", start_date=datetime(2020, 1, 1)) as dag: + with DAG("test-dag", schedule=None, start_date=datetime(2020, 1, 1)) as dag: _DummyOperator.partial(task_id="task").expand(inputs=[1, 2, 3]) serialized_dag = SerializedBaseOperator.serialize(dag) assert serialized_dag[Encoding.VAR]["tasks"][0]["__var"] == { diff --git a/tests/system/providers/alibaba/example_adb_spark_batch.py b/tests/system/providers/alibaba/example_adb_spark_batch.py index 3deb1c94731ce..9f23693066aec 100644 --- a/tests/system/providers/alibaba/example_adb_spark_batch.py +++ b/tests/system/providers/alibaba/example_adb_spark_batch.py @@ -31,6 +31,7 @@ with DAG( dag_id=DAG_ID, start_date=datetime(2021, 1, 1), + schedule=None, default_args={"cluster_id": "your cluster", "rg_name": "your resource group", "region": "your region"}, max_active_runs=1, catchup=False, diff --git a/tests/system/providers/alibaba/example_adb_spark_sql.py b/tests/system/providers/alibaba/example_adb_spark_sql.py index beff440608bb9..fcfe4b896ccba 100644 --- a/tests/system/providers/alibaba/example_adb_spark_sql.py +++ b/tests/system/providers/alibaba/example_adb_spark_sql.py @@ -31,6 +31,7 @@ with DAG( dag_id=DAG_ID, start_date=datetime(2021, 1, 1), + schedule=None, default_args={"cluster_id": "your cluster", "rg_name": "your resource group", "region": "your region"}, max_active_runs=1, catchup=False, diff --git a/tests/system/providers/alibaba/example_oss_bucket.py b/tests/system/providers/alibaba/example_oss_bucket.py index 6a48f05e9587e..1e39d3eb45033 100644 --- a/tests/system/providers/alibaba/example_oss_bucket.py +++ b/tests/system/providers/alibaba/example_oss_bucket.py @@ -29,6 +29,7 @@ with DAG( dag_id=DAG_ID, start_date=datetime(2021, 1, 1), + schedule=None, default_args={"bucket_name": "your bucket", "region": "your region"}, 
     max_active_runs=1,
     tags=["example"],
diff --git a/tests/system/providers/alibaba/example_oss_object.py b/tests/system/providers/alibaba/example_oss_object.py
index 002b23d9436c7..5b73fb1ba7a6a 100644
--- a/tests/system/providers/alibaba/example_oss_object.py
+++ b/tests/system/providers/alibaba/example_oss_object.py
@@ -35,6 +35,7 @@
 with DAG(
     dag_id=DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     default_args={"bucket_name": "your bucket", "region": "your region"},
     max_active_runs=1,
     tags=["example"],
diff --git a/tests/system/providers/apache/kafka/example_dag_event_listener.py b/tests/system/providers/apache/kafka/example_dag_event_listener.py
index 768673f62070c..24d8177ce8cab 100644
--- a/tests/system/providers/apache/kafka/example_dag_event_listener.py
+++ b/tests/system/providers/apache/kafka/example_dag_event_listener.py
@@ -69,6 +69,7 @@ def _producer_function():
     dag_id="fizzbuzz-load-topic",
     description="Load Data to fizz_buzz topic",
     start_date=datetime(2022, 11, 1),
+    schedule=None,
     catchup=False,
     tags=["fizz-buzz"],
 ) as dag:
diff --git a/tests/system/providers/databricks/example_databricks_workflow.py b/tests/system/providers/databricks/example_databricks_workflow.py
index 6639708b532fb..e94e775a3c0a4 100644
--- a/tests/system/providers/databricks/example_databricks_workflow.py
+++ b/tests/system/providers/databricks/example_databricks_workflow.py
@@ -66,7 +66,7 @@
 dag = DAG(
     dag_id="example_databricks_workflow",
     start_date=datetime(2022, 1, 1),
-    schedule_interval=None,
+    schedule=None,
     catchup=False,
     tags=["example", "databricks"],
 )
diff --git a/tests/system/providers/github/example_github.py b/tests/system/providers/github/example_github.py
index 6d2ed2bf6412f..81a458021aa67 100644
--- a/tests/system/providers/github/example_github.py
+++ b/tests/system/providers/github/example_github.py
@@ -36,6 +36,7 @@
 with DAG(
     DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     tags=["example"],
     catchup=False,
 ) as dag:
diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py
index 0c43b1b60bdc7..c60fa6415abba 100644
--- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py
+++ b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py
@@ -378,6 +378,7 @@ def cloud_sql_database_create_body(instance: str) -> dict[str, Any]:
 with DAG(
     dag_id=DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     catchup=False,
     tags=["example", "cloudsql", "postgres"],
 ) as dag:
diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py
index 6acae9272a58c..e71c60297c3af 100644
--- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py
+++ b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py
@@ -257,6 +257,7 @@ def cloud_sql_database_create_body(instance: str) -> dict[str, Any]:
 with DAG(
     dag_id=DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     catchup=False,
     tags=["example", "cloudsql", "postgres"],
 ) as dag:
diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py b/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py
index d069d9236ea16..99e04fa3d3eb3 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py
+++ b/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py
@@ -79,7 +79,7 @@
 with DAG(
     DAG_ID,
-    schedule_interval="@once",
+    schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
     tags=["example", "compute-ssh", "os-login"],
diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py b/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
index c5ad143791b5c..b5964eed7dd5e 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
+++ b/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
@@ -71,7 +71,7 @@
 with DAG(
     DAG_ID,
-    schedule_interval="@once",
+    schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
     tags=["example", "compute-ssh-parallel"],
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py b/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py
index 2d771c3beb5b1..e1d8b807908cf 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py
+++ b/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py
@@ -57,6 +57,7 @@
 with DAG(
     dag_id=DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     catchup=False,
     tags=["example", "dataflow-sql"],
 ) as dag:
diff --git a/tests/system/providers/google/cloud/datafusion/example_datafusion.py b/tests/system/providers/google/cloud/datafusion/example_datafusion.py
index f287e95932e0f..de8ba99b6ffee 100644
--- a/tests/system/providers/google/cloud/datafusion/example_datafusion.py
+++ b/tests/system/providers/google/cloud/datafusion/example_datafusion.py
@@ -170,6 +170,7 @@
 with DAG(
     DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     catchup=False,
     tags=["example", "datafusion"],
 ) as dag:
diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py b/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
index 2ba4ddd7ff8f5..b240276a0b91b 100644
--- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
+++ b/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
@@ -104,6 +104,7 @@
 with DAG(
     dag_id=DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     catchup=False,
     tags=["example", "aws", "gcs", "transfer"],
 ) as dag:
diff --git a/tests/system/providers/http/example_http.py b/tests/system/providers/http/example_http.py
index 13c908ef4a66d..bf5d08f086c18 100644
--- a/tests/system/providers/http/example_http.py
+++ b/tests/system/providers/http/example_http.py
@@ -36,6 +36,7 @@
     default_args={"retries": 1},
     tags=["example"],
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     catchup=False,
 )
diff --git a/tests/system/providers/influxdb/example_influxdb_query.py b/tests/system/providers/influxdb/example_influxdb_query.py
index 57275f63a0b34..6a0c14781aaba 100644
--- a/tests/system/providers/influxdb/example_influxdb_query.py
+++ b/tests/system/providers/influxdb/example_influxdb_query.py
@@ -28,6 +28,7 @@
 with DAG(
     DAG_ID,
     start_date=datetime(2021, 1, 1),
+    schedule=None,
     tags=["example"],
     catchup=False,
 ) as dag:
diff --git a/tests/system/providers/microsoft/azure/example_azure_batch_operator.py b/tests/system/providers/microsoft/azure/example_azure_batch_operator.py
index fa7dd4bc70368..85977f2a0a6e4 100644
--- a/tests/system/providers/microsoft/azure/example_azure_batch_operator.py
+++ b/tests/system/providers/microsoft/azure/example_azure_batch_operator.py
@@ -35,6 +35,7 @@
 with DAG(
dag_id="example_azure_batch", + schedule=None, start_date=datetime(2021, 1, 1), catchup=False, doc_md=__doc__, diff --git a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py b/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py index d496ff42d80e0..d48d636f28dbf 100644 --- a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py +++ b/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py @@ -41,6 +41,7 @@ dag_id=DAG_ID, default_args={"database_name": "airflow_example_db"}, start_date=datetime(2021, 1, 1), + schedule=None, catchup=False, doc_md=__doc__, tags=["example"], diff --git a/tests/system/providers/microsoft/azure/example_wasb_sensors.py b/tests/system/providers/microsoft/azure/example_wasb_sensors.py index a2f89fc2f881d..806a863cbfbc3 100644 --- a/tests/system/providers/microsoft/azure/example_wasb_sensors.py +++ b/tests/system/providers/microsoft/azure/example_wasb_sensors.py @@ -41,6 +41,7 @@ with DAG( "example_wasb_sensors", start_date=datetime(2022, 8, 8), + schedule=None, catchup=False, tags=["example"], ) as dag: diff --git a/tests/system/providers/mysql/example_mysql.py b/tests/system/providers/mysql/example_mysql.py index 0874e24b4bc7e..a890b7846ec95 100644 --- a/tests/system/providers/mysql/example_mysql.py +++ b/tests/system/providers/mysql/example_mysql.py @@ -33,6 +33,7 @@ with DAG( DAG_ID, start_date=datetime(2021, 1, 1), + schedule=None, default_args={"conn_id": "mysql_conn_id"}, tags=["example"], catchup=False, diff --git a/tests/system/providers/neo4j/example_neo4j.py b/tests/system/providers/neo4j/example_neo4j.py index 9422793405816..0aea16f736dba 100644 --- a/tests/system/providers/neo4j/example_neo4j.py +++ b/tests/system/providers/neo4j/example_neo4j.py @@ -33,6 +33,7 @@ with DAG( DAG_ID, start_date=datetime(2021, 1, 1), + schedule=None, tags=["example"], catchup=False, ) as dag: diff --git a/tests/system/providers/opsgenie/example_opsgenie_notifier.py b/tests/system/providers/opsgenie/example_opsgenie_notifier.py index 8d0847817bbd3..a9cdd70de0125 100644 --- a/tests/system/providers/opsgenie/example_opsgenie_notifier.py +++ b/tests/system/providers/opsgenie/example_opsgenie_notifier.py @@ -27,6 +27,7 @@ with DAG( "opsgenie_notifier", start_date=datetime(2023, 1, 1), + schedule=None, on_failure_callback=[send_opsgenie_notification(payload={"message": "Something went wrong!"})], ) as dag: BashOperator( diff --git a/tests/system/providers/redis/example_redis_publish.py b/tests/system/providers/redis/example_redis_publish.py index 5dbdb25abf277..9d50593c04003 100644 --- a/tests/system/providers/redis/example_redis_publish.py +++ b/tests/system/providers/redis/example_redis_publish.py @@ -46,6 +46,7 @@ with DAG( dag_id="redis_example", + schedule=None, default_args=default_args, ) as dag: # [START RedisPublishOperator_DAG] diff --git a/tests/system/providers/telegram/example_telegram.py b/tests/system/providers/telegram/example_telegram.py index 965a148284871..18d734f3c69e8 100644 --- a/tests/system/providers/telegram/example_telegram.py +++ b/tests/system/providers/telegram/example_telegram.py @@ -32,7 +32,7 @@ CONN_ID = "telegram_conn_id" CHAT_ID = "-3222103937" -with DAG(DAG_ID, start_date=datetime(2021, 1, 1), tags=["example"]) as dag: +with DAG(DAG_ID, start_date=datetime(2021, 1, 1), schedule=None, tags=["example"]) as dag: # [START howto_operator_telegram] send_message_telegram_task = TelegramOperator( diff --git a/tests/template/test_templater.py b/tests/template/test_templater.py index 
e1dd9bedb0b90..778ca275e881f 100644 --- a/tests/template/test_templater.py +++ b/tests/template/test_templater.py @@ -29,7 +29,7 @@ class TestTemplater: def test_get_template_env(self): # Test get_template_env when a DAG is provided templater = Templater() - dag = DAG(dag_id="test_dag", render_template_as_native_obj=True) + dag = DAG(dag_id="test_dag", schedule=None, render_template_as_native_obj=True) env = templater.get_template_env(dag) assert isinstance(env, jinja2.Environment) assert not env.sandboxed diff --git a/tests/ti_deps/deps/test_dagrun_exists_dep.py b/tests/ti_deps/deps/test_dagrun_exists_dep.py index ea4d54227b44b..a0c1d8d606099 100644 --- a/tests/ti_deps/deps/test_dagrun_exists_dep.py +++ b/tests/ti_deps/deps/test_dagrun_exists_dep.py @@ -35,7 +35,7 @@ def test_dagrun_doesnt_exist(self, mock_dagrun_find): """ Task instances without dagruns should fail this dep """ - dag = DAG("test_dag", max_active_runs=2) + dag = DAG("test_dag", schedule=None, max_active_runs=2) dagrun = DagRun(state=State.QUEUED) ti = Mock(task=Mock(dag=dag), get_dagrun=Mock(return_value=dagrun)) assert not DagrunRunningDep().is_met(ti=ti) diff --git a/tests/ti_deps/deps/test_prev_dagrun_dep.py b/tests/ti_deps/deps/test_prev_dagrun_dep.py index 499de24965f6c..ba6b1cc68ea30 100644 --- a/tests/ti_deps/deps/test_prev_dagrun_dep.py +++ b/tests/ti_deps/deps/test_prev_dagrun_dep.py @@ -17,6 +17,7 @@ # under the License. from __future__ import annotations +from datetime import timedelta from unittest.mock import ANY, Mock, patch import pytest @@ -32,6 +33,8 @@ pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] +START_DATE = convert_to_utc(datetime(2016, 1, 1)) + class TestPrevDagrunDep: def teardown_method(self): @@ -42,12 +45,12 @@ def test_first_task_run_of_new_task(self): The first task run of a new task in an old DAG should pass if the task has ignore_first_depends_on_past set to True. """ - dag = DAG("test_dag") + dag = DAG("test_dag", schedule=timedelta(days=1), start_date=START_DATE) old_task = BaseOperator( task_id="test_task", dag=dag, depends_on_past=True, - start_date=convert_to_utc(datetime(2016, 1, 1)), + start_date=START_DATE, wait_for_downstream=False, ) # Old DAG run will include only TaskInstance of old_task @@ -220,7 +223,7 @@ def test_dagrun_dep( ): task = BaseOperator( task_id="test_task", - dag=DAG("test_dag"), + dag=DAG("test_dag", schedule=timedelta(days=1), start_date=datetime(2016, 1, 1)), depends_on_past=depends_on_past, start_date=datetime(2016, 1, 1), wait_for_downstream=wait_for_downstream, diff --git a/tests/ti_deps/deps/test_task_concurrency.py b/tests/ti_deps/deps/test_task_concurrency.py index 43ae6d7e80b3c..eb5e5a36fa5fb 100644 --- a/tests/ti_deps/deps/test_task_concurrency.py +++ b/tests/ti_deps/deps/test_task_concurrency.py @@ -32,7 +32,7 @@ class TestTaskConcurrencyDep: def _get_task(self, **kwargs): - return BaseOperator(task_id="test_task", dag=DAG("test_dag"), **kwargs) + return BaseOperator(task_id="test_task", dag=DAG("test_dag", schedule=None), **kwargs) @pytest.mark.parametrize( "kwargs, num_running_tis, is_task_concurrency_dep_met", diff --git a/tests/triggers/test_external_task.py b/tests/triggers/test_external_task.py index 4989868e8a946..7bb41c34502d6 100644 --- a/tests/triggers/test_external_task.py +++ b/tests/triggers/test_external_task.py @@ -237,7 +237,7 @@ async def test_task_state_trigger_success(self, session): reaches an allowed state (i.e. SUCCESS). 
""" trigger_start_time = utcnow() - dag = DAG(self.DAG_ID, start_date=timezone.datetime(2022, 1, 1)) + dag = DAG(self.DAG_ID, schedule=None, start_date=timezone.datetime(2022, 1, 1)) dag_run = DagRun( dag_id=dag.dag_id, run_type="manual", @@ -426,7 +426,7 @@ async def test_dag_state_trigger(self, session): Assert that the DagStateTrigger only goes off on or after a DagRun reaches an allowed state (i.e. SUCCESS). """ - dag = DAG(self.DAG_ID, start_date=timezone.datetime(2022, 1, 1)) + dag = DAG(self.DAG_ID, schedule=None, start_date=timezone.datetime(2022, 1, 1)) dag_run = DagRun( dag_id=dag.dag_id, run_type="manual", diff --git a/tests/utils/test_dag_cycle.py b/tests/utils/test_dag_cycle.py index 51e7638a2c402..1cf607fd8ee50 100644 --- a/tests/utils/test_dag_cycle.py +++ b/tests/utils/test_dag_cycle.py @@ -30,13 +30,13 @@ class TestCycleTester: def test_cycle_empty(self): # test empty - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) assert not check_cycle(dag) def test_cycle_single_task(self): # test single task - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) with dag: EmptyOperator(task_id="A") @@ -44,7 +44,7 @@ def test_cycle_single_task(self): assert not check_cycle(dag) def test_semi_complex(self): - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # A -> B -> C # B -> D @@ -61,7 +61,7 @@ def test_semi_complex(self): def test_cycle_no_cycle(self): # test no cycle - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # A -> B -> C # B -> D @@ -82,7 +82,7 @@ def test_cycle_no_cycle(self): def test_cycle_loop(self): # test self loop - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # A -> A with dag: @@ -94,7 +94,7 @@ def test_cycle_loop(self): def test_cycle_downstream_loop(self): # test downstream self loop - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # A -> B -> C -> D -> E -> E with dag: @@ -114,7 +114,7 @@ def test_cycle_downstream_loop(self): def test_cycle_large_loop(self): # large loop - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # A -> B -> C -> D -> E -> A with dag: @@ -132,7 +132,7 @@ def test_cycle_large_loop(self): def test_cycle_arbitrary_loop(self): # test arbitrary loop - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # E-> A -> B -> F -> A # -> C -> F @@ -155,7 +155,7 @@ def test_cycle_arbitrary_loop(self): def test_cycle_task_group_with_edge_labels(self): # Test a cycle is not detected when Labels are used between tasks in Task Groups. 
- dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) with dag: with TaskGroup(group_id="group"): diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py index b1bf2863f4022..5cb52696f19ce 100644 --- a/tests/utils/test_dot_renderer.py +++ b/tests/utils/test_dot_renderer.py @@ -66,7 +66,7 @@ def test_should_render_dag_dependencies(self): assert "task_2 -> dag_three" in dot.source def test_should_render_dag(self): - with DAG(dag_id="DAG_ID") as dag: + with DAG(dag_id="DAG_ID", schedule=None) as dag: task_1 = BashOperator(start_date=START_DATE, task_id="first", bash_command="echo 1") task_2 = BashOperator(start_date=START_DATE, task_id="second", bash_command="echo 1") task_3 = PythonOperator(start_date=START_DATE, task_id="third", python_callable=mock.MagicMock()) @@ -150,7 +150,7 @@ def test_should_render_dag_orientation(self, session, dag_maker): # Change orientation orientation = "LR" - dag = DAG(dag_id="DAG_ID", orientation=orientation) + dag = DAG(dag_id="DAG_ID", schedule=None, orientation=orientation) dot = dot_renderer.render_dag(dag, tis=tis) source = dot.source # Should render DAG title with orientation @@ -158,7 +158,7 @@ def test_should_render_dag_orientation(self, session, dag_maker): assert f"label=DAG_ID labelloc=t rankdir={orientation}" in source def test_render_task_group(self): - with DAG(dag_id="example_task_group", start_date=START_DATE) as dag: + with DAG(dag_id="example_task_group", schedule=None, start_date=START_DATE) as dag: start = EmptyOperator(task_id="start") with TaskGroup("section_1", tooltip="Tasks for section_1") as section_1: diff --git a/tests/utils/test_edgemodifier.py b/tests/utils/test_edgemodifier.py index f2177957167ca..89644180c27d4 100644 --- a/tests/utils/test_edgemodifier.py +++ b/tests/utils/test_edgemodifier.py @@ -44,7 +44,7 @@ def test_dag(): def f(task_id): return f"OP:{task_id}" - with DAG(dag_id="test_xcom_dag", default_args=DEFAULT_ARGS) as dag: + with DAG(dag_id="test_xcom_dag", schedule=None, default_args=DEFAULT_ARGS) as dag: operators = [PythonOperator(python_callable=f, task_id=f"test_op_{i}") for i in range(4)] return dag, operators @@ -56,7 +56,7 @@ def test_taskgroup_dag(): def f(task_id): return f"OP:{task_id}" - with DAG(dag_id="test_xcom_dag", default_args=DEFAULT_ARGS) as dag: + with DAG(dag_id="test_xcom_dag", schedule=None, default_args=DEFAULT_ARGS) as dag: op1 = PythonOperator(python_callable=f, task_id="test_op_1") op4 = PythonOperator(python_callable=f, task_id="test_op_4") with TaskGroup("group_1") as group: @@ -72,7 +72,7 @@ def test_complex_taskgroup_dag(): def f(task_id): return f"OP:{task_id}" - with DAG(dag_id="test_complex_dag", default_args=DEFAULT_ARGS) as dag: + with DAG(dag_id="test_complex_dag", schedule=None, default_args=DEFAULT_ARGS) as dag: with TaskGroup("group_1") as group: group_emp1 = EmptyOperator(task_id="group_empty1") group_emp2 = EmptyOperator(task_id="group_empty2") @@ -116,7 +116,7 @@ def test_multiple_taskgroups_dag(): def f(task_id): return f"OP:{task_id}" - with DAG(dag_id="test_multiple_task_group_dag", default_args=DEFAULT_ARGS) as dag: + with DAG(dag_id="test_multiple_task_group_dag", schedule=None, default_args=DEFAULT_ARGS) as dag: with TaskGroup("group1") as group1: group1_emp1 = EmptyOperator(task_id="group1_empty1") group1_emp2 = EmptyOperator(task_id="group1_empty2") diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py 
index 8d0b96435b53d..da10034cb9370 100644 --- a/tests/utils/test_log_handlers.py +++ b/tests/utils/test_log_handlers.py @@ -98,7 +98,7 @@ def test_file_task_handler_when_ti_value_is_invalid(self): def task_callable(ti): ti.log.info("test") - dag = DAG("dag_for_testing_file_task_handler", start_date=DEFAULT_DATE) + dag = DAG("dag_for_testing_file_task_handler", schedule=None, start_date=DEFAULT_DATE) dagrun = dag.create_dagrun( run_type=DagRunType.MANUAL, state=State.RUNNING, @@ -151,7 +151,7 @@ def test_file_task_handler(self): def task_callable(ti): ti.log.info("test") - dag = DAG("dag_for_testing_file_task_handler", start_date=DEFAULT_DATE) + dag = DAG("dag_for_testing_file_task_handler", schedule=None, start_date=DEFAULT_DATE) dagrun = dag.create_dagrun( run_type=DagRunType.MANUAL, state=State.RUNNING, @@ -206,7 +206,7 @@ def test_file_task_handler_running(self): def task_callable(ti): ti.log.info("test") - dag = DAG("dag_for_testing_file_task_handler", start_date=DEFAULT_DATE) + dag = DAG("dag_for_testing_file_task_handler", schedule=None, start_date=DEFAULT_DATE) task = PythonOperator( task_id="task_for_testing_file_log_handler", python_callable=task_callable, @@ -406,7 +406,7 @@ def test_read_from_k8s_under_multi_namespace_mode( def task_callable(ti): ti.log.info("test") - with DAG("dag_for_testing_file_task_handler", start_date=DEFAULT_DATE) as dag: + with DAG("dag_for_testing_file_task_handler", schedule=None, start_date=DEFAULT_DATE) as dag: task = PythonOperator( task_id="task_for_testing_file_log_handler", python_callable=task_callable, diff --git a/tests/utils/test_sqlalchemy.py b/tests/utils/test_sqlalchemy.py index bd4a9763e1aa1..2e1b011c720ea 100644 --- a/tests/utils/test_sqlalchemy.py +++ b/tests/utils/test_sqlalchemy.py @@ -70,10 +70,7 @@ def test_utc_transformations(self): iso_date = start_date.isoformat() execution_date = start_date + datetime.timedelta(hours=1, days=1) - dag = DAG( - dag_id=dag_id, - start_date=start_date, - ) + dag = DAG(dag_id=dag_id, schedule=datetime.timedelta(days=1), start_date=start_date) dag.clear() run = dag.create_dagrun( @@ -104,7 +101,7 @@ def test_process_bind_param_naive(self): # naive start_date = datetime.datetime.now() - dag = DAG(dag_id=dag_id, start_date=start_date) + dag = DAG(dag_id=dag_id, start_date=start_date, schedule=datetime.timedelta(days=1)) dag.clear() with pytest.raises((ValueError, StatementError)): diff --git a/tests/utils/test_state.py b/tests/utils/test_state.py index e00ba36fe3897..3ed88555566c1 100644 --- a/tests/utils/test_state.py +++ b/tests/utils/test_state.py @@ -16,6 +16,8 @@ # under the License. 
from __future__ import annotations +from datetime import timedelta + import pytest from airflow.models.dag import DAG @@ -34,7 +36,7 @@ def test_dagrun_state_enum_escape(): referenced in DB query """ with create_session() as session: - dag = DAG(dag_id="test_dagrun_state_enum_escape", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_dagrun_state_enum_escape", schedule=timedelta(days=1), start_date=DEFAULT_DATE) dag.create_dagrun( run_type=DagRunType.SCHEDULED, state=DagRunState.QUEUED, diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index df593d5b787c1..084d8c35ac03e 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -172,7 +172,7 @@ def my_task(): def test_build_task_group_context_manager(): execution_date = pendulum.parse("20200101") - with DAG("test_build_task_group_context_manager", start_date=execution_date) as dag: + with DAG("test_build_task_group_context_manager", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") with TaskGroup("group234") as group234: _ = EmptyOperator(task_id="task2") @@ -209,7 +209,7 @@ def test_build_task_group(): as using context manager. """ execution_date = pendulum.parse("20200101") - dag = DAG("test_build_task_group", start_date=execution_date) + dag = DAG("test_build_task_group", schedule=None, start_date=execution_date) task1 = EmptyOperator(task_id="task1", dag=dag) group234 = TaskGroup("group234", dag=dag) _ = EmptyOperator(task_id="task2", dag=dag, task_group=group234) @@ -243,7 +243,7 @@ def test_build_task_group_with_prefix(): Tests that prefix_group_id turns on/off prefixing of task_id with group_id. """ execution_date = pendulum.parse("20200101") - with DAG("test_build_task_group_with_prefix", start_date=execution_date) as dag: + with DAG("test_build_task_group_with_prefix", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") with TaskGroup("group234", prefix_group_id=False) as group234: task2 = EmptyOperator(task_id="task2") @@ -326,7 +326,7 @@ def task_5(): print("task_5") execution_date = pendulum.parse("20200101") - with DAG("test_build_task_group_with_task_decorator", start_date=execution_date) as dag: + with DAG("test_build_task_group_with_task_decorator", schedule=None, start_date=execution_date) as dag: tsk_1 = task_1() with TaskGroup("group234") as group234: @@ -377,7 +377,7 @@ def test_sub_dag_task_group(): Tests dag.partial_subset() updates task_group correctly. 
""" execution_date = pendulum.parse("20200101") - with DAG("test_test_task_group_sub_dag", start_date=execution_date) as dag: + with DAG("test_test_task_group_sub_dag", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") with TaskGroup("group234") as group234: _ = EmptyOperator(task_id="task2") @@ -450,7 +450,7 @@ def test_sub_dag_task_group(): def test_dag_edges(): execution_date = pendulum.parse("20200101") - with DAG("test_dag_edges", start_date=execution_date) as dag: + with DAG("test_dag_edges", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") with TaskGroup("group_a") as group_a: with TaskGroup("group_b") as group_b: @@ -559,7 +559,7 @@ def test_dag_edges(): def test_dag_edges_setup_teardown(): execution_date = pendulum.parse("20200101") - with DAG("test_dag_edges", start_date=execution_date) as dag: + with DAG("test_dag_edges", schedule=None, start_date=execution_date) as dag: setup1 = EmptyOperator(task_id="setup1").as_setup() teardown1 = EmptyOperator(task_id="teardown1").as_teardown() @@ -592,7 +592,7 @@ def test_dag_edges_setup_teardown_nested(): execution_date = pendulum.parse("20200101") - with DAG(dag_id="s_t_dag", start_date=execution_date) as dag: + with DAG(dag_id="s_t_dag", schedule=None, start_date=execution_date) as dag: @task def test_task(): @@ -637,29 +637,29 @@ def test_duplicate_group_id(): execution_date = pendulum.parse("20200101") - with DAG("test_duplicate_group_id", start_date=execution_date): + with DAG("test_duplicate_group_id", schedule=None, start_date=execution_date): _ = EmptyOperator(task_id="task1") with pytest.raises(DuplicateTaskIdFound, match=r".* 'task1' .*"), TaskGroup("task1"): pass - with DAG("test_duplicate_group_id", start_date=execution_date): + with DAG("test_duplicate_group_id", schedule=None, start_date=execution_date): _ = EmptyOperator(task_id="task1") with TaskGroup("group1", prefix_group_id=False): with pytest.raises(DuplicateTaskIdFound, match=r".* 'group1' .*"), TaskGroup("group1"): pass - with DAG("test_duplicate_group_id", start_date=execution_date): + with DAG("test_duplicate_group_id", schedule=None, start_date=execution_date): with TaskGroup("group1", prefix_group_id=False): with pytest.raises(DuplicateTaskIdFound, match=r".* 'group1' .*"): _ = EmptyOperator(task_id="group1") - with DAG("test_duplicate_group_id", start_date=execution_date): + with DAG("test_duplicate_group_id", schedule=None, start_date=execution_date): _ = EmptyOperator(task_id="task1") with TaskGroup("group1"): with pytest.raises(DuplicateTaskIdFound, match=r".* 'group1.downstream_join_id' .*"): _ = EmptyOperator(task_id="downstream_join_id") - with DAG("test_duplicate_group_id", start_date=execution_date): + with DAG("test_duplicate_group_id", schedule=None, start_date=execution_date): _ = EmptyOperator(task_id="task1") with TaskGroup("group1"): with pytest.raises(DuplicateTaskIdFound, match=r".* 'group1.upstream_join_id' .*"): @@ -671,7 +671,7 @@ def test_task_without_dag(): Test that if a task doesn't have a DAG when it's being set as the relative of another task which has a DAG, the task should be added to the root TaskGroup of the other task's DAG. 
""" - dag = DAG(dag_id="test_task_without_dag", start_date=pendulum.parse("20200101")) + dag = DAG(dag_id="test_task_without_dag", schedule=None, start_date=pendulum.parse("20200101")) op1 = EmptyOperator(task_id="op1", dag=dag) op2 = EmptyOperator(task_id="op2") op3 = EmptyOperator(task_id="op3") @@ -743,7 +743,10 @@ def section_2(value2): execution_date = pendulum.parse("20201109") with DAG( - dag_id="example_nested_task_group_decorator", start_date=execution_date, tags=["example"] + dag_id="example_nested_task_group_decorator", + schedule=None, + start_date=execution_date, + tags=["example"], ) as dag: t_start = task_start() sec_1 = section_1(t_start) @@ -793,7 +796,7 @@ def test_build_task_group_depended_by_task(): from airflow.decorators import dag as dag_decorator, task - @dag_decorator(start_date=pendulum.now()) + @dag_decorator(schedule=None, start_date=pendulum.now()) def build_task_group_depended_by_task(): @task def task_start(): @@ -860,7 +863,12 @@ def section_a(value): return task_3(task_2(task_1(value))) execution_date = pendulum.parse("20201109") - with DAG(dag_id="example_task_group_decorator_mix", start_date=execution_date, tags=["example"]) as dag: + with DAG( + dag_id="example_task_group_decorator_mix", + schedule=None, + start_date=execution_date, + tags=["example"], + ) as dag: t_start = PythonOperator(task_id="task_start", python_callable=task_start, dag=dag) sec_1 = section_a(t_start.output) t_end = PythonOperator(task_id="task_end", python_callable=task_end, dag=dag) @@ -915,7 +923,12 @@ def section_2(value): return task_3(task_2(task_1(value))) execution_date = pendulum.parse("20201109") - with DAG(dag_id="example_task_group_decorator_mix", start_date=execution_date, tags=["example"]) as dag: + with DAG( + dag_id="example_task_group_decorator_mix", + schedule=None, + start_date=execution_date, + tags=["example"], + ) as dag: t_start = PythonOperator(task_id="task_start", python_callable=task_start, dag=dag) with TaskGroup("section_1", tooltip="section_1") as section_1: @@ -965,10 +978,9 @@ def test_default_args(): execution_date = pendulum.parse("20201109") with DAG( dag_id="example_task_group_default_args", + schedule=None, start_date=execution_date, - default_args={ - "owner": "dag", - }, + default_args={"owner": "dag"}, ): with TaskGroup("group1", default_args={"owner": "group"}): task_1 = EmptyOperator(task_id="task_1") @@ -1026,7 +1038,12 @@ def task_group3(): task_end() execution_date = pendulum.parse("20201109") - with DAG(dag_id="example_duplicate_task_group_id", start_date=execution_date, tags=["example"]) as dag: + with DAG( + dag_id="example_duplicate_task_group_id", + schedule=None, + start_date=execution_date, + tags=["example"], + ) as dag: task_group1() task_group2() task_group3() @@ -1077,7 +1094,12 @@ def task_group1(name: str): task_end() execution_date = pendulum.parse("20201109") - with DAG(dag_id="example_multi_call_task_groups", start_date=execution_date, tags=["example"]) as dag: + with DAG( + dag_id="example_multi_call_task_groups", + schedule=None, + start_date=execution_date, + tags=["example"], + ) as dag: task_group1("Call1") task_group1("Call2") @@ -1149,7 +1171,7 @@ def tg(): ... 
def test_decorator_multiple_use_task(): from airflow.decorators import task - @dag("test-dag", start_date=DEFAULT_DATE) + @dag("test-dag", schedule=None, start_date=DEFAULT_DATE) def _test_dag(): @task def t(): @@ -1173,7 +1195,7 @@ def tg(): def test_topological_sort1(): - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # A -> B # A -> C -> D @@ -1199,7 +1221,7 @@ def test_topological_sort1(): def test_topological_sort2(): - dag = DAG("dag", start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) + dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) # C -> (A u B) -> D # C -> E @@ -1235,7 +1257,7 @@ def test_topological_sort2(): def test_topological_nested_groups(): execution_date = pendulum.parse("20200101") - with DAG("test_dag_edges", start_date=execution_date) as dag: + with DAG("test_dag_edges", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") task5 = EmptyOperator(task_id="task5") with TaskGroup("group_a") as group_a: @@ -1270,7 +1292,7 @@ def nested_topo(group): def test_hierarchical_alphabetical_sort(): execution_date = pendulum.parse("20200101") - with DAG("test_dag_edges", start_date=execution_date) as dag: + with DAG("test_dag_edges", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") task5 = EmptyOperator(task_id="task5") with TaskGroup("group_c"): @@ -1312,7 +1334,7 @@ def nested(group): def test_topological_group_dep(): execution_date = pendulum.parse("20200101") - with DAG("test_dag_edges", start_date=execution_date) as dag: + with DAG("test_dag_edges", schedule=None, start_date=execution_date) as dag: task1 = EmptyOperator(task_id="task1") task6 = EmptyOperator(task_id="task6") with TaskGroup("group_a") as group_a: @@ -1346,7 +1368,7 @@ def nested_topo(group): def test_add_to_sub_group(): - with DAG("test_dag", start_date=pendulum.parse("20200101")): + with DAG("test_dag", schedule=None, start_date=pendulum.parse("20200101")): tg = TaskGroup("section") task = EmptyOperator(task_id="task") with pytest.raises(TaskAlreadyInTaskGroup) as ctx: @@ -1356,7 +1378,7 @@ def test_add_to_sub_group(): def test_add_to_another_group(): - with DAG("test_dag", start_date=pendulum.parse("20200101")): + with DAG("test_dag", schedule=None, start_date=pendulum.parse("20200101")): tg = TaskGroup("section_1") with TaskGroup("section_2"): task = EmptyOperator(task_id="task") @@ -1370,7 +1392,7 @@ def test_task_group_edge_modifier_chain(): from airflow.models.baseoperator import chain from airflow.utils.edgemodifier import Label - with DAG(dag_id="test", start_date=pendulum.DateTime(2022, 5, 20)) as dag: + with DAG(dag_id="test", schedule=None, start_date=pendulum.DateTime(2022, 5, 20)) as dag: start = EmptyOperator(task_id="sleep_3_seconds") with TaskGroup(group_id="group1") as tg: @@ -1394,7 +1416,7 @@ def test_task_group_edge_modifier_chain(): def test_mapped_task_group_id_prefix_task_id(): from tests.test_utils.mock_operators import MockOperator - with DAG(dag_id="d", start_date=DEFAULT_DATE) as dag: + with DAG(dag_id="d", schedule=None, start_date=DEFAULT_DATE) as dag: t1 = MockOperator.partial(task_id="t1").expand(arg1=[]) with TaskGroup("g"): t2 = MockOperator.partial(task_id="t2").expand(arg1=[]) @@ -1407,7 +1429,7 @@ def test_mapped_task_group_id_prefix_task_id(): def test_iter_tasks(): - with DAG("test_dag", start_date=pendulum.parse("20200101")) as 
dag: + with DAG("test_dag", schedule=None, start_date=pendulum.parse("20200101")) as dag: with TaskGroup("section_1") as tg1: EmptyOperator(task_id="task1") @@ -1444,6 +1466,7 @@ def test_iter_tasks(): def test_override_dag_default_args(): with DAG( dag_id="test_dag", + schedule=None, start_date=pendulum.parse("20200101"), default_args={ "retries": 1, @@ -1467,6 +1490,7 @@ def test_override_dag_default_args(): def test_override_dag_default_args_in_nested_tg(): with DAG( dag_id="test_dag", + schedule=None, start_date=pendulum.parse("20200101"), default_args={ "retries": 1, @@ -1491,6 +1515,7 @@ def test_override_dag_default_args_in_nested_tg(): def test_override_dag_default_args_in_multi_level_nested_tg(): with DAG( dag_id="test_dag", + schedule=None, start_date=pendulum.parse("20200101"), default_args={ "retries": 1, @@ -1520,7 +1545,7 @@ def test_override_dag_default_args_in_multi_level_nested_tg(): def test_task_group_arrow_with_setups_teardowns(): - with DAG(dag_id="hi", start_date=pendulum.datetime(2022, 1, 1)): + with DAG(dag_id="hi", schedule=None, start_date=pendulum.datetime(2022, 1, 1)): with TaskGroup(group_id="tg1") as tg1: s1 = BaseOperator(task_id="s1") w1 = BaseOperator(task_id="w1") @@ -1533,7 +1558,7 @@ def test_task_group_arrow_with_setups_teardowns(): def test_task_group_arrow_with_setup_group(): - with DAG(dag_id="setup_group_teardown_group", start_date=pendulum.now()): + with DAG(dag_id="setup_group_teardown_group", schedule=None, start_date=pendulum.now()): with TaskGroup("group_1") as g1: @setup @@ -1591,7 +1616,7 @@ def test_task_group_arrow_with_setup_group_deeper_setup(): When recursing upstream for a non-teardown leaf, we should ignore setups that are direct upstream of a teardown. """ - with DAG(dag_id="setup_group_teardown_group_2", start_date=pendulum.now()): + with DAG(dag_id="setup_group_teardown_group_2", schedule=None, start_date=pendulum.now()): with TaskGroup("group_1") as g1: @setup @@ -1635,7 +1660,7 @@ def work(): ... def test_task_group_with_invalid_arg_type_raises_error(): error_msg = "'ui_color' has an invalid type with value 123, expected type is " - with DAG(dag_id="dag_with_tg_invalid_arg_type"): + with DAG(dag_id="dag_with_tg_invalid_arg_type", schedule=None): with pytest.raises(TypeError, match=error_msg): with TaskGroup("group_1", ui_color=123): EmptyOperator(task_id="task1") @@ -1643,7 +1668,7 @@ def test_task_group_with_invalid_arg_type_raises_error(): @mock.patch("airflow.utils.task_group.validate_instance_args") def test_task_group_init_validates_arg_types(mock_validate_instance_args): - with DAG(dag_id="dag_with_tg_valid_arg_types"): + with DAG(dag_id="dag_with_tg_valid_arg_types", schedule=None): with TaskGroup("group_1", ui_color="red") as tg: EmptyOperator(task_id="task1") diff --git a/tests/utils/test_types.py b/tests/utils/test_types.py index 61a1f94c0818e..844dcb7d54efa 100644 --- a/tests/utils/test_types.py +++ b/tests/utils/test_types.py @@ -16,6 +16,8 @@ # under the License. 
from __future__ import annotations +from datetime import timedelta + import pytest from airflow.models.dag import DAG @@ -34,7 +36,7 @@ def test_runtype_enum_escape(): referenced in DB query """ with create_session() as session: - dag = DAG(dag_id="test_enum_dags", start_date=DEFAULT_DATE) + dag = DAG(dag_id="test_enum_dags", schedule=timedelta(days=1), start_date=DEFAULT_DATE) data_interval = dag.timetable.infer_manual_data_interval(run_after=DEFAULT_DATE) dag.create_dagrun( run_type=DagRunType.SCHEDULED, diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index ae18bdc943981..7a8826ac90c81 100644 --- a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -329,7 +329,7 @@ def test_mark_task_instance_state(test_app): clear_db_runs() start_date = datetime(2020, 1, 1) - with DAG("test_mark_task_instance_state", start_date=start_date) as dag: + with DAG("test_mark_task_instance_state", start_date=start_date, schedule="0 0 * * *") as dag: task_1 = EmptyOperator(task_id="task_1") task_2 = EmptyOperator(task_id="task_2") task_3 = EmptyOperator(task_id="task_3") @@ -420,7 +420,7 @@ def test_mark_task_group_state(test_app): clear_db_runs() start_date = datetime(2020, 1, 1) - with DAG("test_mark_task_group_state", start_date=start_date) as dag: + with DAG("test_mark_task_group_state", start_date=start_date, schedule="0 0 * * *") as dag: start = EmptyOperator(task_id="start") with TaskGroup("section_1", tooltip="Tasks for section_1") as section_1: diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py index d5b70caba586d..852a3f79a275b 100644 --- a/tests/www/views/test_views_extra_links.py +++ b/tests/www/views/test_views_extra_links.py @@ -73,7 +73,7 @@ class DummyTestOperator(BaseOperator): @pytest.fixture(scope="module") def dag(): - return DAG("dag", start_date=DEFAULT_DATE) + return DAG("dag", start_date=DEFAULT_DATE, schedule="0 0 * * *") @pytest.fixture(scope="module") diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 6d1a473f267f2..5393115041392 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -205,7 +205,7 @@ def _process_file(file_path): @pytest.fixture def working_dags(tmp_path): - dag_contents_template = "from airflow import DAG\ndag = DAG('{}', tags=['{}'])" + dag_contents_template = "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'])" for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): path = tmp_path / f"{dag_id}.py" path.write_text(dag_contents_template.format(dag_id, tag)) @@ -214,9 +214,9 @@ def working_dags(tmp_path): @pytest.fixture def working_dags_with_read_perm(tmp_path): - dag_contents_template = "from airflow import DAG\ndag = DAG('{}', tags=['{}'])" + dag_contents_template = "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'])" dag_contents_template_with_read_perm = ( - "from airflow import DAG\ndag = DAG('{}', tags=['{}'], " + "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'], " "access_control={{'role_single_dag':{{'can_read'}}}}) " ) for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): @@ -230,9 +230,9 @@ def working_dags_with_read_perm(tmp_path): @pytest.fixture def working_dags_with_edit_perm(tmp_path): - dag_contents_template = "from airflow import DAG\ndag = DAG('{}', tags=['{}'])" + dag_contents_template = "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'])" dag_contents_template_with_read_perm = ( - "from airflow import 
DAG\ndag = DAG('{}', tags=['{}'], " + "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'], " "access_control={{'role_single_dag':{{'can_edit'}}}}) " ) for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): @@ -266,7 +266,7 @@ def broken_dags_after_working(tmp_path): path = tmp_path / "all_in_one.py" contents = "from airflow import DAG\n" for i, dag_id in enumerate(TEST_FILTER_DAG_IDS): - contents += f"dag{i} = DAG('{dag_id}')\n" + contents += f"dag{i} = DAG('{dag_id}', schedule=None)\n" path.write_text(contents) _process_file(path) diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py index 842f1010138d4..79dd4a57feef7 100644 --- a/tests/www/views/test_views_rendered.py +++ b/tests/www/views/test_views_rendered.py @@ -49,6 +49,7 @@ def dag(): return DAG( "testdag", start_date=DEFAULT_DATE, + schedule="0 0 * * *", user_defined_filters={"hello": lambda name: f"Hello {name}"}, user_defined_macros={"fullname": lambda fname, lname: f"{fname} {lname}"}, ) diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index d0e7c168e59a2..9496e13fd49cf 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -614,7 +614,9 @@ def test_delete_dag_button_for_dag_on_scheduler_only(admin_client, new_id_exampl @pytest.fixture def new_dag_to_delete(): - dag = DAG("new_dag_to_delete", is_paused_upon_creation=True) + dag = DAG( + "new_dag_to_delete", is_paused_upon_creation=True, schedule="0 * * * *", start_date=DEFAULT_DATE + ) session = settings.Session() dag.sync_to_db(session=session) return dag
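
Reviewer note, not part of the diff: the test updates above converge on three explicit schedule forms. The following is a minimal sketch, assuming Airflow 2.4+ where `DAG(schedule=...)` accepts `None`, a `timedelta`, or a cron string; the dag_ids below are hypothetical and chosen only for illustration.

    # Sketch of the explicit-schedule pattern applied throughout this diff.
    # Assumes Airflow 2.4+; dag_ids are hypothetical.
    from datetime import timedelta

    import pendulum

    from airflow.models.dag import DAG

    # Manual-only DAG: no runs are ever created by the scheduler.
    manual_dag = DAG(
        dag_id="example_manual_only",
        schedule=None,
        start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    )

    # Interval-based schedule: one run per day.
    daily_dag = DAG(
        dag_id="example_daily",
        schedule=timedelta(days=1),
        start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    )

    # Cron expression: a run at midnight every day.
    cron_dag = DAG(
        dag_id="example_cron",
        schedule="0 0 * * *",
        start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    )

The split is deliberate: tests that only trigger runs manually (or never run at all, such as the cycle-detection and task-group structure tests) take `schedule=None`, while tests that create `DagRunType.SCHEDULED` runs, such as those in test_state.py, test_types.py, and test_sqlalchemy.py, need a real timetable and therefore get a `timedelta` or cron value.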