diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index 39e87cd7d8b52..03908cd2e3cfc 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -21,7 +21,7 @@ description: 'Sets up Python and Breeze' inputs: python-version: description: 'Python version to use' - default: "3.9" + default: "3.10" use-uv: description: 'Whether to use uv tool' required: true diff --git a/.github/actions/prepare_all_ci_images/action.yml b/.github/actions/prepare_all_ci_images/action.yml index 76c00a72a3998..7b461f4e3e2db 100644 --- a/.github/actions/prepare_all_ci_images/action.yml +++ b/.github/actions/prepare_all_ci_images/action.yml @@ -34,14 +34,8 @@ runs: # TODO: Currently we cannot loop through the list of python versions and have dynamic list of # tasks. Instead we hardcode all possible python versions and they - but # this should be implemented in stash action as list of keys to download. - # That includes 3.8 - 3.12 as we are backporting it to v2-10-test branch + # That includes 3.9 - 3.12 as we are backporting it to v3-0-test branch # This is captured in https://github.com/apache/airflow/issues/45268 - - name: "Restore CI docker image ${{ inputs.platform }}:3.8" - uses: ./.github/actions/prepare_single_ci_image - with: - platform: ${{ inputs.platform }} - python: "3.8" - python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} - name: "Restore CI docker image ${{ inputs.platform }}:3.9" uses: ./.github/actions/prepare_single_ci_image with: diff --git a/.github/workflows/test-providers.yml b/.github/workflows/test-providers.yml index 525e8e8599a9d..eec3c04722744 100644 --- a/.github/workflows/test-providers.yml +++ b/.github/workflows/test-providers.yml @@ -126,10 +126,6 @@ jobs: breeze release-management generate-issue-content-providers --only-available-in-dist --disable-progress if: matrix.package-format == 'wheel' - - name: Remove Python 3.9-incompatible provider distributions - run: | - echo "Removing Python 3.9-incompatible provider: cloudant" - rm -vf dist/*cloudant* - name: "Generate source constraints from CI image" shell: bash run: > diff --git a/Dockerfile b/Dockerfile index 72a5693030960..2336543d42fab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -48,7 +48,7 @@ ARG AIRFLOW_USER_HOME_DIR=/home/airflow # latest released version here ARG AIRFLOW_VERSION="3.0.2" -ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" +ARG PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" # You can swap comments between those two args to test pip from the main version diff --git a/Dockerfile.ci b/Dockerfile.ci index beebd98f788ae..e9057ce19736e 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -16,7 +16,7 @@ # # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. 
# -ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" +ARG PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" ############################################################################################## # This is the script image where we keep all inlined bash scripts needed in other segments diff --git a/README.md b/README.md index d3b31364b94cc..463b00efcabe2 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ Apache Airflow is tested with: | | Main version (dev) | Stable version (3.0.2) | |------------|------------------------|------------------------| -| Python | 3.9, 3.10, 3.11, 3.12 | 3.9, 3.10, 3.11, 3.12 | +| Python | 3.10, 3.11, 3.12 | 3.9, 3.10, 3.11, 3.12 | | Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | | Kubernetes | 1.30, 1.31, 1.32, 1.33 | 1.30, 1.31, 1.32, 1.33 | | PostgreSQL | 13, 14, 15, 16, 17 | 13, 14, 15, 16, 17 | diff --git a/airflow-core/docs/extra-packages-ref.rst b/airflow-core/docs/extra-packages-ref.rst index 7a13cc39dcc08..47106fab2ed8d 100644 --- a/airflow-core/docs/extra-packages-ref.rst +++ b/airflow-core/docs/extra-packages-ref.rst @@ -164,7 +164,7 @@ with a consistent set of dependencies based on constraint files provided by Airf :substitutions: pip install apache-airflow[google,amazon,apache-spark]==|version| \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" Note, that this will install providers in the versions that were released at the time of Airflow |version| release. You can later upgrade those providers manually if you want to use latest versions of the providers. diff --git a/airflow-core/docs/installation/installing-from-pypi.rst b/airflow-core/docs/installation/installing-from-pypi.rst index 1a70723842a60..b51b897b1344b 100644 --- a/airflow-core/docs/installation/installing-from-pypi.rst +++ b/airflow-core/docs/installation/installing-from-pypi.rst @@ -40,7 +40,7 @@ Typical command to install Airflow from scratch in a reproducible way from PyPI .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" Typically, you can add other dependencies and providers as separate command after the reproducible @@ -112,7 +112,7 @@ but you can pick your own set of extras and providers to install. .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" .. note:: @@ -143,7 +143,7 @@ performing dependency resolution. .. 
code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" pip install "apache-airflow==|version|" apache-airflow-providers-google==10.1.1 You can also downgrade or upgrade other dependencies this way - even if they are not compatible with @@ -151,7 +151,7 @@ those dependencies that are stored in the original constraints file: .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" pip install "apache-airflow[celery]==|version|" dbt-core==0.20.0 .. warning:: @@ -194,7 +194,7 @@ one provided by the community. .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" pip install "apache-airflow==|version|" dbt-core==0.20.0 pip freeze > my-constraints.txt diff --git a/airflow-core/docs/start.rst b/airflow-core/docs/start.rst index d0d0f1dd5f782..07af3ba73028f 100644 --- a/airflow-core/docs/start.rst +++ b/airflow-core/docs/start.rst @@ -28,7 +28,7 @@ This quick start guide will help you bootstrap an Airflow standalone instance on Officially supported installation methods is with``pip`. - Run ``pip install apache-airflow[EXTRAS]==AIRFLOW_VERSION --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-AIRFLOW_VERSION/constraints-PYTHON_VERSION.txt"``, for example ``pip install "apache-airflow[celery]==3.0.0" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.0/constraints-3.9.txt"`` to install Airflow in a reproducible way. + Run ``pip install apache-airflow[EXTRAS]==AIRFLOW_VERSION --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-AIRFLOW_VERSION/constraints-PYTHON_VERSION.txt"``, for example ``pip install "apache-airflow[celery]==3.0.0" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.0/constraints-3.10.txt"`` to install Airflow in a reproducible way. 
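
Aside (not part of the diff): every install command in these documentation hunks switches from a `constraints-3.9.txt` file to `constraints-3.10.txt`. The constraint URL is normally derived from the running interpreter, which is what the shell snippet in the next hunk does; a Python equivalent is sketched below purely for illustration, with `AIRFLOW_VERSION` as an assumed example value taken from the Dockerfile hunk above.

```python
# Hedged sketch only: reproduces the constraint-URL pattern used in the docs
# hunks above.  AIRFLOW_VERSION is an assumed example, not read from the diff.
import sys

AIRFLOW_VERSION = "3.0.2"
# On a supported interpreter this now resolves to "3.10", "3.11" or "3.12".
PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"
CONSTRAINT_URL = (
    "https://raw.githubusercontent.com/apache/airflow/"
    f"constraints-{AIRFLOW_VERSION}/constraints-{PYTHON_VERSION}.txt"
)
print(CONSTRAINT_URL)
```
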
@@ -75,7 +75,7 @@ This quick start guide will help you bootstrap an Airflow standalone instance on PYTHON_VERSION="$(python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')" CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" - # For example this would install 3.0.0 with python 3.9: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt + # For example this would install 3.0.0 with python 3.10: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt uv pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" diff --git a/airflow-core/hatch_build.py b/airflow-core/hatch_build.py index 438767bcd9d5d..d691e6c25cd1d 100644 --- a/airflow-core/hatch_build.py +++ b/airflow-core/hatch_build.py @@ -19,10 +19,10 @@ import logging import os import shutil -from collections.abc import Iterable +from collections.abc import Callable, Iterable from pathlib import Path from subprocess import run -from typing import Any, Callable +from typing import Any from hatchling.builders.config import BuilderConfig from hatchling.builders.plugin.interface import BuilderInterface diff --git a/airflow-core/src/airflow/api_fastapi/auth/tokens.py b/airflow-core/src/airflow/api_fastapi/auth/tokens.py index d56678f052811..dfcef7318313a 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/tokens.py +++ b/airflow-core/src/airflow/api_fastapi/auth/tokens.py @@ -21,9 +21,9 @@ import time import uuid from base64 import urlsafe_b64encode -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime -from typing import TYPE_CHECKING, Any, Callable, Literal, overload +from typing import TYPE_CHECKING, Any, Literal, overload import attrs import httpx @@ -68,7 +68,7 @@ def key_to_jwk_dict(key: AllowedKeys, kid: str | None = None): from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey from jwt.algorithms import OKPAlgorithm, RSAAlgorithm - if isinstance(key, (RSAPrivateKey, Ed25519PrivateKey)): + if isinstance(key, RSAPrivateKey | Ed25519PrivateKey): key = key.public_key() if isinstance(key, RSAPublicKey): diff --git a/airflow-core/src/airflow/api_fastapi/common/parameters.py b/airflow-core/src/airflow/api_fastapi/common/parameters.py index 65b2c77b8959b..c6e3bc945b020 100644 --- a/airflow-core/src/airflow/api_fastapi/common/parameters.py +++ b/airflow-core/src/airflow/api_fastapi/common/parameters.py @@ -18,19 +18,16 @@ from __future__ import annotations from abc import ABC, abstractmethod -from collections.abc import Iterable +from collections.abc import Callable, Iterable from datetime import datetime from enum import Enum from typing import ( TYPE_CHECKING, Annotated, Any, - Callable, Generic, Literal, - Optional, TypeVar, - Union, overload, ) @@ -267,7 +264,7 @@ def __init__( self.filter_option: FilterOptionEnum = filter_option def to_orm(self, select: Select) -> Select: - if isinstance(self.value, (list, str)) and not self.value and self.skip_none: + if isinstance(self.value, list | str) and not self.value and self.skip_none: return select if self.value is None and self.skip_none: return select @@ -520,14 +517,14 @@ def depends_float( # Common Safe DateTime DateTimeQuery = Annotated[str, AfterValidator(_safe_parse_datetime)] -OptionalDateTimeQuery = Annotated[Union[str, None], AfterValidator(_safe_parse_datetime_optional)] 
+OptionalDateTimeQuery = Annotated[str | None, AfterValidator(_safe_parse_datetime_optional)] # DAG QueryLimit = Annotated[LimitFilter, Depends(LimitFilter.depends)] QueryOffset = Annotated[OffsetFilter, Depends(OffsetFilter.depends)] QueryPausedFilter = Annotated[ - FilterParam[Optional[bool]], - Depends(filter_param_factory(DagModel.is_paused, Optional[bool], filter_name="paused")), + FilterParam[bool | None], + Depends(filter_param_factory(DagModel.is_paused, bool | None, filter_name="paused")), ] QueryExcludeStaleFilter = Annotated[_ExcludeStaleFilter, Depends(_ExcludeStaleFilter.depends)] QueryDagIdPatternSearch = Annotated[ @@ -544,8 +541,8 @@ def depends_float( # DagRun QueryLastDagRunStateFilter = Annotated[ - FilterParam[Optional[DagRunState]], - Depends(filter_param_factory(DagRun.state, Optional[DagRunState], filter_name="last_dag_run_state")), + FilterParam[DagRunState | None], + Depends(filter_param_factory(DagRun.state, DagRunState | None, filter_name="last_dag_run_state")), ] @@ -696,8 +693,8 @@ def _optional_boolean(value: bool | None) -> bool | None: return value if value is not None else False -QueryIncludeUpstream = Annotated[Union[bool], AfterValidator(_optional_boolean)] -QueryIncludeDownstream = Annotated[Union[bool], AfterValidator(_optional_boolean)] +QueryIncludeUpstream = Annotated[bool, AfterValidator(_optional_boolean)] +QueryIncludeDownstream = Annotated[bool, AfterValidator(_optional_boolean)] state_priority: list[None | TaskInstanceState] = [ TaskInstanceState.FAILED, diff --git a/airflow-core/src/airflow/api_fastapi/common/router.py b/airflow-core/src/airflow/api_fastapi/common/router.py index 12d9fca072459..e01b8462ba8bf 100644 --- a/airflow-core/src/airflow/api_fastapi/common/router.py +++ b/airflow-core/src/airflow/api_fastapi/common/router.py @@ -17,7 +17,8 @@ from __future__ import annotations -from typing import Any, Callable +from collections.abc import Callable +from typing import Any from fastapi import APIRouter from fastapi.types import DecoratedCallable diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py index 2e7ae13cfcdb2..4342d04c1e125 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py @@ -17,7 +17,8 @@ from __future__ import annotations -from typing import Annotated, Callable +from collections.abc import Callable +from typing import Annotated from pydantic import BeforeValidator, Field diff --git a/airflow-core/src/airflow/api_fastapi/core_api/security.py b/airflow-core/src/airflow/api_fastapi/core_api/security.py index be428124f9db6..5b3036de0d46e 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/security.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/security.py @@ -16,8 +16,9 @@ # under the License. 
from __future__ import annotations +from collections.abc import Callable from pathlib import Path -from typing import TYPE_CHECKING, Annotated, Callable +from typing import TYPE_CHECKING, Annotated from urllib.parse import ParseResult, urljoin, urlparse from fastapi import Depends, HTTPException, Request, status diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py index 0208ea1a0a5f7..00bbf543288fe 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py @@ -64,7 +64,7 @@ def handle_bulk_create(self, action: BulkCreateAction, results: BulkActionRespon for variable in action.entities: if variable.key in create_keys: - should_serialize_json = isinstance(variable.value, (dict, list)) + should_serialize_json = isinstance(variable.value, dict | list) Variable.set( key=variable.key, value=variable.value, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py index a69cafb7bbbda..9c356e9c3ad84 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py @@ -133,7 +133,7 @@ def get_child_task_map(parent_task_id: str, task_node_map: dict[str, dict[str, A def _count_tis(node: int | MappedTaskGroup | MappedOperator, run_id: str, session: SessionDep) -> int: - if not isinstance(node, (MappedTaskGroup, MappedOperator)): + if not isinstance(node, MappedTaskGroup | MappedOperator): return node with contextlib.suppress(NotFullyPopulated, NotMapped): return DBBaseOperator.get_mapped_ti_count(node, run_id=run_id, session=session) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py index c43c931f3e28a..5afc2e75a7193 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py @@ -19,7 +19,7 @@ import uuid from datetime import timedelta from enum import Enum -from typing import Annotated, Any, Literal, Union +from typing import Annotated, Any, Literal from pydantic import ( AwareDatetime, @@ -213,14 +213,12 @@ def ti_state_discriminator(v: dict[str, str] | StrictBaseModel) -> str: # It is called "_terminal_" to avoid future conflicts if we added an actual state named "terminal" # and "_other_" is a catch-all for all other states that are not covered by the other schemas. 
TIStateUpdate = Annotated[ - Union[ - Annotated[TITerminalStatePayload, Tag("_terminal_")], - Annotated[TISuccessStatePayload, Tag("success")], - Annotated[TITargetStatePayload, Tag("_other_")], - Annotated[TIDeferredStatePayload, Tag("deferred")], - Annotated[TIRescheduleStatePayload, Tag("up_for_reschedule")], - Annotated[TIRetryStatePayload, Tag("up_for_retry")], - ], + Annotated[TITerminalStatePayload, Tag("_terminal_")] + | Annotated[TISuccessStatePayload, Tag("success")] + | Annotated[TITargetStatePayload, Tag("_other_")] + | Annotated[TIDeferredStatePayload, Tag("deferred")] + | Annotated[TIRescheduleStatePayload, Tag("up_for_reschedule")] + | Annotated[TIRetryStatePayload, Tag("up_for_retry")], Discriminator(ti_state_discriminator), ] diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/deps.py b/airflow-core/src/airflow/api_fastapi/execution_api/deps.py index c2161180dbb46..2648a64ffad7a 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/deps.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/deps.py @@ -20,7 +20,7 @@ import sys import time -from typing import Any, Optional +from typing import Any import structlog import svcs @@ -55,8 +55,8 @@ class JWTBearer(HTTPBearer): def __init__( self, - path_param_name: Optional[str] = None, - required_claims: Optional[dict[str, Any]] = None, + path_param_name: str | None = None, + required_claims: dict[str, Any] | None = None, ): super().__init__(auto_error=False) self.path_param_name = path_param_name @@ -66,7 +66,7 @@ async def __call__( # type: ignore[override] self, request: Request, services=DepContainer, - ) -> Optional[TIToken]: + ) -> TIToken | None: creds = await super().__call__(request) if not creds: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing auth token") diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py index e22d0a5f34d0d..0a57e604a6eb6 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py @@ -434,7 +434,7 @@ def _create_ti_state_update_query_and_update_state( dag_bag: DagBagDep, dag_id: str, ) -> tuple[Update, TaskInstanceState]: - if isinstance(ti_patch_payload, (TITerminalStatePayload, TIRetryStatePayload, TISuccessStatePayload)): + if isinstance(ti_patch_payload, TITerminalStatePayload | TIRetryStatePayload | TISuccessStatePayload): ti = session.get(TI, ti_id_str) updated_state = ti_patch_payload.state query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py index f8039f0a4d3fb..d671c97dafd9e 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py @@ -17,8 +17,6 @@ from __future__ import annotations -from typing import Optional - from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, schema from airflow.api_fastapi.execution_api.datamodels.taskinstance import TIRunContext @@ -30,7 +28,7 @@ class DowngradeUpstreamMapIndexes(VersionChange): description = __doc__ instructions_to_migrate_to_previous_version = ( - schema(TIRunContext).field("upstream_map_indexes").had(type=Optional[dict[str, 
int]]), # type: ignore + schema(TIRunContext).field("upstream_map_indexes").had(type=dict[str, int] | None), # type: ignore ) @convert_response_to_previous_version_for(TIRunContext) # type: ignore[arg-type] diff --git a/airflow-core/src/airflow/callbacks/callback_requests.py b/airflow-core/src/airflow/callbacks/callback_requests.py index 8cf8c77035737..6c4c978b672de 100644 --- a/airflow-core/src/airflow/callbacks/callback_requests.py +++ b/airflow-core/src/airflow/callbacks/callback_requests.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Annotated, Literal, Union +from typing import TYPE_CHECKING, Annotated, Literal from pydantic import BaseModel, Field @@ -86,6 +86,6 @@ class DagCallbackRequest(BaseCallbackRequest): CallbackRequest = Annotated[ - Union[DagCallbackRequest, TaskCallbackRequest], + DagCallbackRequest | TaskCallbackRequest, Field(discriminator="type"), ] diff --git a/airflow-core/src/airflow/callbacks/pipe_callback_sink.py b/airflow-core/src/airflow/callbacks/pipe_callback_sink.py index 7f6a2883743df..f7aba11960c18 100644 --- a/airflow-core/src/airflow/callbacks/pipe_callback_sink.py +++ b/airflow-core/src/airflow/callbacks/pipe_callback_sink.py @@ -18,7 +18,8 @@ from __future__ import annotations import contextlib -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.callbacks.base_callback_sink import BaseCallbackSink diff --git a/airflow-core/src/airflow/cli/cli_config.py b/airflow-core/src/airflow/cli/cli_config.py index 725fbe46a4330..31022e17219c4 100644 --- a/airflow-core/src/airflow/cli/cli_config.py +++ b/airflow-core/src/airflow/cli/cli_config.py @@ -24,8 +24,8 @@ import json import os import textwrap -from collections.abc import Iterable -from typing import Callable, NamedTuple, Union +from collections.abc import Callable, Iterable +from typing import NamedTuple import lazy_object_proxy @@ -932,7 +932,7 @@ class GroupCommand(NamedTuple): epilog: str | None = None -CLICommand = Union[ActionCommand, GroupCommand] +CLICommand = ActionCommand | GroupCommand ASSETS_COMMANDS = ( ActionCommand( diff --git a/airflow-core/src/airflow/cli/commands/daemon_utils.py b/airflow-core/src/airflow/cli/commands/daemon_utils.py index 409ff15e77fd8..c55c12b380461 100644 --- a/airflow-core/src/airflow/cli/commands/daemon_utils.py +++ b/airflow-core/src/airflow/cli/commands/daemon_utils.py @@ -18,7 +18,7 @@ import signal from argparse import Namespace -from typing import Callable +from collections.abc import Callable from daemon import daemon from daemon.pidfile import TimeoutPIDLockFile diff --git a/airflow-core/src/airflow/cli/commands/plugins_command.py b/airflow-core/src/airflow/cli/commands/plugins_command.py index 29dd75674afe0..f26546439e940 100644 --- a/airflow-core/src/airflow/cli/commands/plugins_command.py +++ b/airflow-core/src/airflow/cli/commands/plugins_command.py @@ -27,7 +27,7 @@ def _get_name(class_like_object) -> str: - if isinstance(class_like_object, (str, PluginsDirectorySource)): + if isinstance(class_like_object, str | PluginsDirectorySource): return str(class_like_object) if inspect.isclass(class_like_object): return class_like_object.__name__ diff --git a/airflow-core/src/airflow/cli/simple_table.py b/airflow-core/src/airflow/cli/simple_table.py index b8e4c6a7a41a0..01025f3d64762 100644 --- a/airflow-core/src/airflow/cli/simple_table.py +++ b/airflow-core/src/airflow/cli/simple_table.py @@ -18,8 +18,8 @@ import inspect 
import json -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from rich.box import ASCII_DOUBLE_HEAD from rich.console import Console @@ -84,7 +84,7 @@ def print_as_plain_table(self, data: list[dict]): print(output) def _normalize_data(self, value: Any, output: str) -> list | str | dict | None: - if isinstance(value, (tuple, list)): + if isinstance(value, tuple | list): if output == "table": return ",".join(str(self._normalize_data(x, output)) for x in value) return [self._normalize_data(x, output) for x in value] diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py index a32f119c744a6..d2f2cce2dfbc4 100644 --- a/airflow-core/src/airflow/configuration.py +++ b/airflow-core/src/airflow/configuration.py @@ -39,7 +39,7 @@ from io import StringIO from json.decoder import JSONDecodeError from re import Pattern -from typing import IO, TYPE_CHECKING, Any, Union +from typing import IO, TYPE_CHECKING, Any from urllib.parse import urlsplit from packaging.version import parse as parse_version @@ -64,9 +64,9 @@ _SQLITE3_VERSION_PATTERN = re.compile(r"(?P^\d+(?:\.\d+)*)\D?.*$") -ConfigType = Union[str, int, float, bool] +ConfigType = str | int | float | bool ConfigOptionsDictType = dict[str, ConfigType] -ConfigSectionSourcesType = dict[str, Union[str, tuple[str, str]]] +ConfigSectionSourcesType = dict[str, str | tuple[str, str]] ConfigSourcesType = dict[str, ConfigSectionSourcesType] ENV_VAR_PREFIX = "AIRFLOW__" diff --git a/airflow-core/src/airflow/dag_processing/processor.py b/airflow-core/src/airflow/dag_processing/processor.py index 011393f22c886..d626338482420 100644 --- a/airflow-core/src/airflow/dag_processing/processor.py +++ b/airflow-core/src/airflow/dag_processing/processor.py @@ -19,8 +19,9 @@ import os import sys import traceback +from collections.abc import Callable from pathlib import Path -from typing import TYPE_CHECKING, Annotated, BinaryIO, Callable, ClassVar, Literal, Union +from typing import TYPE_CHECKING, Annotated, BinaryIO, ClassVar, Literal import attrs from pydantic import BaseModel, Field, TypeAdapter @@ -88,12 +89,12 @@ class DagFileParsingResult(BaseModel): ToManager = Annotated[ - Union[DagFileParsingResult, GetConnection, GetVariable, PutVariable, DeleteVariable], + DagFileParsingResult | GetConnection | GetVariable | PutVariable | DeleteVariable, Field(discriminator="type"), ] ToDagProcessor = Annotated[ - Union[DagFileParseRequest, ConnectionResult, VariableResult, ErrorResponse, OKResponse], + DagFileParseRequest | ConnectionResult | VariableResult | ErrorResponse | OKResponse, Field(discriminator="type"), ] diff --git a/airflow-core/src/airflow/exceptions.py b/airflow-core/src/airflow/exceptions.py index 045f9647ade76..2d12b0baabf32 100644 --- a/airflow-core/src/airflow/exceptions.py +++ b/airflow-core/src/airflow/exceptions.py @@ -476,7 +476,7 @@ def __init__( self.kwargs = kwargs self.timeout: timedelta | None # Check timeout type at runtime - if isinstance(timeout, (int, float)): + if isinstance(timeout, int | float): self.timeout = timedelta(seconds=timeout) else: self.timeout = timeout diff --git a/airflow-core/src/airflow/executors/base_executor.py b/airflow-core/src/airflow/executors/base_executor.py index 292ac685bac58..db42046d4cccf 100644 --- a/airflow-core/src/airflow/executors/base_executor.py +++ b/airflow-core/src/airflow/executors/base_executor.py @@ -23,7 +23,7 @@ from collections.abc 
import Sequence from dataclasses import dataclass, field from functools import cached_property -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any import pendulum @@ -58,7 +58,7 @@ # Event_buffer dict value type # Tuple of: state, info - EventBufferValueType = tuple[Optional[str], Any] + EventBufferValueType = tuple[str | None, Any] log = logging.getLogger(__name__) diff --git a/airflow-core/src/airflow/executors/local_executor.py b/airflow-core/src/airflow/executors/local_executor.py index 4c8ca1e73cb93..eabb921136978 100644 --- a/airflow-core/src/airflow/executors/local_executor.py +++ b/airflow-core/src/airflow/executors/local_executor.py @@ -31,7 +31,7 @@ import multiprocessing.sharedctypes import os from multiprocessing import Queue, SimpleQueue -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from setproctitle import setproctitle @@ -43,7 +43,7 @@ if TYPE_CHECKING: from sqlalchemy.orm import Session - TaskInstanceStateType = tuple[workloads.TaskInstance, TaskInstanceState, Optional[Exception]] + TaskInstanceStateType = tuple[workloads.TaskInstance, TaskInstanceState, Exception | None] def _run_worker( diff --git a/airflow-core/src/airflow/executors/workloads.py b/airflow-core/src/airflow/executors/workloads.py index 3da9fe3cede39..c8d5f789d7112 100644 --- a/airflow-core/src/airflow/executors/workloads.py +++ b/airflow-core/src/airflow/executors/workloads.py @@ -20,7 +20,7 @@ import uuid from datetime import datetime from pathlib import Path -from typing import TYPE_CHECKING, Annotated, Literal, Union +from typing import TYPE_CHECKING, Annotated, Literal import structlog from pydantic import BaseModel, Field @@ -160,6 +160,6 @@ class RunTrigger(BaseModel): All = Annotated[ - Union[ExecuteTask, RunTrigger], + ExecuteTask | RunTrigger, Field(discriminator="type"), ] diff --git a/airflow-core/src/airflow/io/__init__.py b/airflow-core/src/airflow/io/__init__.py index 01e0cfaabe2d5..6bbea93e59ba3 100644 --- a/airflow-core/src/airflow/io/__init__.py +++ b/airflow-core/src/airflow/io/__init__.py @@ -18,11 +18,10 @@ import inspect import logging -from collections.abc import Mapping +from collections.abc import Callable, Mapping from functools import cache from typing import ( TYPE_CHECKING, - Callable, ) from fsspec.implementations.local import LocalFileSystem diff --git a/airflow-core/src/airflow/jobs/job.py b/airflow-core/src/airflow/jobs/job.py index 9c06552abca21..7cd2180bbd7bd 100644 --- a/airflow-core/src/airflow/jobs/job.py +++ b/airflow-core/src/airflow/jobs/job.py @@ -17,9 +17,10 @@ # under the License. 
from __future__ import annotations +from collections.abc import Callable from functools import cached_property, lru_cache from time import sleep -from typing import TYPE_CHECKING, Callable, NoReturn +from typing import TYPE_CHECKING, NoReturn from sqlalchemy import Column, Index, Integer, String, case, select from sqlalchemy.exc import OperationalError diff --git a/airflow-core/src/airflow/jobs/scheduler_job_runner.py b/airflow-core/src/airflow/jobs/scheduler_job_runner.py index 0cc71bf19b482..50d6a5d63589e 100644 --- a/airflow-core/src/airflow/jobs/scheduler_job_runner.py +++ b/airflow-core/src/airflow/jobs/scheduler_job_runner.py @@ -25,12 +25,12 @@ import sys import time from collections import Counter, defaultdict, deque -from collections.abc import Collection, Iterable, Iterator +from collections.abc import Callable, Collection, Iterable, Iterator from contextlib import ExitStack from datetime import date, timedelta from functools import lru_cache, partial from itertools import groupby -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from sqlalchemy import and_, delete, exists, func, or_, select, text, tuple_, update from sqlalchemy.exc import OperationalError diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index d96cf7faacd8a..2811dcf89f38b 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -30,7 +30,7 @@ from datetime import datetime from socket import socket from traceback import format_exception -from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, TypedDict, Union +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, TypedDict import attrs import structlog @@ -210,18 +210,16 @@ class TriggerStateSync(BaseModel): ToTriggerRunner = Annotated[ - Union[ - messages.StartTriggerer, - messages.TriggerStateSync, - ConnectionResult, - VariableResult, - XComResult, - DagRunStateResult, - DRCount, - TICount, - TaskStatesResult, - ErrorResponse, - ], + messages.StartTriggerer + | messages.TriggerStateSync + | ConnectionResult + | VariableResult + | XComResult + | DagRunStateResult + | DRCount + | TICount + | TaskStatesResult + | ErrorResponse, Field(discriminator="type"), ] """ @@ -231,16 +229,14 @@ class TriggerStateSync(BaseModel): ToTriggerSupervisor = Annotated[ - Union[ - messages.TriggerStateChanges, - GetConnection, - GetVariable, - GetXCom, - GetTICount, - GetTaskStates, - GetDagRunState, - GetDRCount, - ], + messages.TriggerStateChanges + | GetConnection + | GetVariable + | GetXCom + | GetTICount + | GetTaskStates + | GetDagRunState + | GetDRCount, Field(discriminator="type"), ] """ diff --git a/airflow-core/src/airflow/lineage/hook.py b/airflow-core/src/airflow/lineage/hook.py index 9aeb65c277147..712b778aef6aa 100644 --- a/airflow-core/src/airflow/lineage/hook.py +++ b/airflow-core/src/airflow/lineage/hook.py @@ -20,7 +20,7 @@ import hashlib import json from collections import defaultdict -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING, TypeAlias import attr @@ -33,7 +33,7 @@ from airflow.sdk import ObjectStoragePath # Store context what sent lineage. 
- LineageContext = Union[BaseHook, ObjectStoragePath] + LineageContext: TypeAlias = BaseHook | ObjectStoragePath _hook_lineage_collector: HookLineageCollector | None = None diff --git a/airflow-core/src/airflow/metrics/otel_logger.py b/airflow-core/src/airflow/metrics/otel_logger.py index a8ad9d8ce2eb1..317b70a5ca29d 100644 --- a/airflow-core/src/airflow/metrics/otel_logger.py +++ b/airflow-core/src/airflow/metrics/otel_logger.py @@ -20,7 +20,8 @@ import logging import random import warnings -from typing import TYPE_CHECKING, Callable, Union +from collections.abc import Callable +from typing import TYPE_CHECKING from opentelemetry import metrics from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter @@ -47,7 +48,7 @@ log = logging.getLogger(__name__) -GaugeValues = Union[int, float] +GaugeValues = int | float DEFAULT_GAUGE_VALUE = 0.0 diff --git a/airflow-core/src/airflow/metrics/protocols.py b/airflow-core/src/airflow/metrics/protocols.py index 3c405aef37e7a..d170fe29ad9ba 100644 --- a/airflow-core/src/airflow/metrics/protocols.py +++ b/airflow-core/src/airflow/metrics/protocols.py @@ -19,12 +19,12 @@ import datetime import time -from typing import TYPE_CHECKING, Protocol, Union +from typing import TYPE_CHECKING, Protocol if TYPE_CHECKING: from airflow.typing_compat import Self -DeltaType = Union[int, float, datetime.timedelta] +DeltaType = int | float | datetime.timedelta class TimerProtocol(Protocol): diff --git a/airflow-core/src/airflow/metrics/statsd_logger.py b/airflow-core/src/airflow/metrics/statsd_logger.py index 8d47bc9ae132a..d952693eb230d 100644 --- a/airflow-core/src/airflow/metrics/statsd_logger.py +++ b/airflow-core/src/airflow/metrics/statsd_logger.py @@ -18,8 +18,9 @@ from __future__ import annotations import logging +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from airflow.configuration import conf from airflow.exceptions import AirflowConfigException diff --git a/airflow-core/src/airflow/metrics/validators.py b/airflow-core/src/airflow/metrics/validators.py index 85152426fdd17..252524455e022 100644 --- a/airflow-core/src/airflow/metrics/validators.py +++ b/airflow-core/src/airflow/metrics/validators.py @@ -24,10 +24,10 @@ import re import string import warnings -from collections.abc import Iterable +from collections.abc import Callable, Iterable from functools import partial, wraps from re import Pattern -from typing import Callable, cast +from typing import cast from airflow.configuration import conf from airflow.exceptions import InvalidStatsNameException diff --git a/airflow-core/src/airflow/models/asset.py b/airflow-core/src/airflow/models/asset.py index a345ebe7995d3..79cf8ef28a622 100644 --- a/airflow-core/src/airflow/models/asset.py +++ b/airflow-core/src/airflow/models/asset.py @@ -209,7 +209,7 @@ def __hash__(self): def __eq__(self, other: object) -> bool: from airflow.sdk.definitions.asset import AssetAlias - if isinstance(other, (self.__class__, AssetAlias)): + if isinstance(other, self.__class__ | AssetAlias): return self.name == other.name return NotImplemented @@ -306,7 +306,7 @@ def __init__(self, name: str = "", uri: str = "", **kwargs): def __eq__(self, other: object) -> bool: from airflow.sdk.definitions.asset import Asset - if isinstance(other, (self.__class__, Asset)): + if isinstance(other, self.__class__ | Asset): return self.name == other.name and self.uri == other.uri return NotImplemented 
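
Aside (not part of the diff): the `__eq__` hunks in `asset.py` above show a pattern repeated throughout this change set — `isinstance` checks against a tuple of classes are rewritten as PEP 604 union objects, which `isinstance()` and `issubclass()` accept directly on Python 3.10+. A minimal, illustrative sketch of the equivalence (placeholder values, not Airflow code):

```python
# Minimal sketch, assuming Python >= 3.10: the union object created by `|`
# can be passed to isinstance() exactly like the old tuple of types.
value = 42

assert isinstance(value, (int, str))   # pre-3.10 spelling, still valid
assert isinstance(value, int | str)    # PEP 604 spelling used in this diff

# The union is a real runtime object, so it can be stored and reused as well.
Number = int | float
assert isinstance(3.14, Number)
assert not isinstance("3.14", Number)
```

Behaviour is identical; the only cost is raising the minimum supported interpreter to 3.10, which is exactly what the CI, Dockerfile, and README hunks at the top of this diff do.
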
diff --git a/airflow-core/src/airflow/models/dag.py b/airflow-core/src/airflow/models/dag.py index a188b4d66f8ea..a74a68ac2c493 100644 --- a/airflow-core/src/airflow/models/dag.py +++ b/airflow-core/src/airflow/models/dag.py @@ -22,13 +22,12 @@ import logging import re from collections import defaultdict -from collections.abc import Collection, Generator, Iterable, Sequence +from collections.abc import Callable, Collection, Generator, Iterable, Sequence from datetime import datetime, timedelta from functools import cache from typing import ( TYPE_CHECKING, Any, - Callable, TypeVar, Union, cast, @@ -122,14 +121,9 @@ TAG_MAX_LEN = 100 DagStateChangeCallback = Callable[[Context], None] -ScheduleInterval = Union[None, str, timedelta, relativedelta] +ScheduleInterval = None | str | timedelta | relativedelta -ScheduleArg = Union[ - ScheduleInterval, - Timetable, - BaseAsset, - Collection[Union["Asset", "AssetAlias"]], -] +ScheduleArg = ScheduleInterval | Timetable | BaseAsset | Collection[Union["Asset", "AssetAlias"]] class InconsistentDataInterval(AirflowException): @@ -469,7 +463,7 @@ def _upgrade_outdated_dag_access_control(access_control=None): for role, perms in access_control.items(): if packaging_version.parse(FAB_VERSION) >= packaging_version.parse("1.3.0"): updated_access_control[role] = updated_access_control.get(role, {}) - if isinstance(perms, (set, list)): + if isinstance(perms, set | list): # Support for old-style access_control where only the actions are specified updated_access_control[role][permissions.RESOURCE_DAG] = set(perms) else: @@ -547,7 +541,7 @@ def infer_automated_data_interval(self, logical_date: datetime) -> DataInterval: :meta private: """ timetable_type = type(self.timetable) - if issubclass(timetable_type, (NullTimetable, OnceTimetable, AssetTriggeredTimetable)): + if issubclass(timetable_type, NullTimetable | OnceTimetable | AssetTriggeredTimetable): return DataInterval.exact(timezone.coerce_datetime(logical_date)) start = timezone.coerce_datetime(logical_date) if issubclass(timetable_type, CronDataIntervalTimetable): @@ -965,7 +959,7 @@ def _get_task_instances( tis = tis.where(DagRun.logical_date <= end_date) if state: - if isinstance(state, (str, TaskInstanceState)): + if isinstance(state, str | TaskInstanceState): tis = tis.where(TaskInstance.state == state) elif len(state) == 1: tis = tis.where(TaskInstance.state == state[0]) diff --git a/airflow-core/src/airflow/models/dagbag.py b/airflow-core/src/airflow/models/dagbag.py index 83fc47444c179..df13c38581d76 100644 --- a/airflow-core/src/airflow/models/dagbag.py +++ b/airflow-core/src/airflow/models/dagbag.py @@ -448,7 +448,7 @@ def parse(mod_name, filepath): dagbag_import_timeout = settings.get_dagbag_import_timeout(filepath) - if not isinstance(dagbag_import_timeout, (int, float)): + if not isinstance(dagbag_import_timeout, int | float): raise TypeError( f"Value ({dagbag_import_timeout}) from get_dagbag_import_timeout must be int or float" ) @@ -520,7 +520,7 @@ def _process_modules(self, filepath, mods, file_last_changed_on_disk): from airflow.sdk import DAG as SDKDAG from airflow.sdk.definitions._internal.contextmanager import DagContext - top_level_dags = {(o, m) for m in mods for o in m.__dict__.values() if isinstance(o, (DAG, SDKDAG))} + top_level_dags = {(o, m) for m in mods for o in m.__dict__.values() if isinstance(o, DAG | SDKDAG)} top_level_dags.update(DagContext.autoregistered_dags) diff --git a/airflow-core/src/airflow/models/dagrun.py b/airflow-core/src/airflow/models/dagrun.py index 
a1126c2a9c3cc..1d73055a08668 100644 --- a/airflow-core/src/airflow/models/dagrun.py +++ b/airflow-core/src/airflow/models/dagrun.py @@ -21,14 +21,12 @@ import os import re from collections import defaultdict -from collections.abc import Iterable, Iterator, Sequence +from collections.abc import Callable, Iterable, Iterator, Sequence from typing import ( TYPE_CHECKING, Any, - Callable, NamedTuple, TypeVar, - Union, overload, ) @@ -978,16 +976,9 @@ def set_dagrun_span_attrs(self, span: Span | EmptySpan): if self._state == DagRunState.FAILED: span.set_attribute("airflow.dag_run.error", True) - attribute_value_type = Union[ - str, - bool, - int, - float, - Sequence[str], - Sequence[bool], - Sequence[int], - Sequence[float], - ] + attribute_value_type = ( + str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float] + ) # Explicitly set the value type to Union[...] to avoid a mypy error. attributes: dict[str, attribute_value_type] = { diff --git a/airflow-core/src/airflow/models/expandinput.py b/airflow-core/src/airflow/models/expandinput.py index b126c6f24b07f..46b70be79a954 100644 --- a/airflow-core/src/airflow/models/expandinput.py +++ b/airflow-core/src/airflow/models/expandinput.py @@ -20,7 +20,7 @@ import functools import operator from collections.abc import Iterable, Sized -from typing import TYPE_CHECKING, Any, ClassVar, Union +from typing import TYPE_CHECKING, Any, ClassVar import attrs @@ -54,7 +54,7 @@ def _needs_run_time_resolution(v: OperatorExpandArgument) -> TypeGuard[MappedArgument | SchedulerXComArg]: from airflow.models.xcom_arg import SchedulerXComArg - return isinstance(v, (MappedArgument, SchedulerXComArg)) + return isinstance(v, MappedArgument | SchedulerXComArg) @attrs.define @@ -138,7 +138,7 @@ def get_total_map_length(self, run_id: str, *, session: Session) -> int: "list-of-dicts": SchedulerListOfDictsExpandInput, } -SchedulerExpandInput = Union[SchedulerDictOfListsExpandInput, SchedulerListOfDictsExpandInput] +SchedulerExpandInput = SchedulerDictOfListsExpandInput | SchedulerListOfDictsExpandInput def create_expand_input(kind: str, value: Any) -> SchedulerExpandInput: diff --git a/airflow-core/src/airflow/models/operator.py b/airflow-core/src/airflow/models/operator.py index b42823e4c2189..a42b21b0a2544 100644 --- a/airflow-core/src/airflow/models/operator.py +++ b/airflow-core/src/airflow/models/operator.py @@ -17,12 +17,10 @@ # under the License. 
from __future__ import annotations -from typing import Union - from airflow.models.baseoperator import BaseOperator from airflow.models.mappedoperator import MappedOperator -Operator = Union[BaseOperator, MappedOperator] +Operator = BaseOperator | MappedOperator __all__ = ["Operator"] diff --git a/airflow-core/src/airflow/models/serialized_dag.py b/airflow-core/src/airflow/models/serialized_dag.py index 71722b54adee6..a05c4e6520415 100644 --- a/airflow-core/src/airflow/models/serialized_dag.py +++ b/airflow-core/src/airflow/models/serialized_dag.py @@ -21,9 +21,9 @@ import logging import zlib -from collections.abc import Iterable, Iterator, Sequence +from collections.abc import Callable, Iterable, Iterator, Sequence from datetime import timedelta -from typing import TYPE_CHECKING, Any, Callable, Literal +from typing import TYPE_CHECKING, Any, Literal import sqlalchemy_jsonfield import uuid6 diff --git a/airflow-core/src/airflow/models/taskmap.py b/airflow-core/src/airflow/models/taskmap.py index f0fd4c0231b70..ff3b94a84dacf 100644 --- a/airflow-core/src/airflow/models/taskmap.py +++ b/airflow-core/src/airflow/models/taskmap.py @@ -137,7 +137,7 @@ def expand_mapped_task(cls, task, run_id: str, *, session: Session) -> tuple[Seq from airflow.sdk.definitions.mappedoperator import MappedOperator from airflow.settings import task_instance_mutation_hook - if not isinstance(task, (BaseOperator, MappedOperator)): + if not isinstance(task, BaseOperator | MappedOperator): raise RuntimeError( f"cannot expand unrecognized operator type {type(task).__module__}.{type(task).__name__}" ) diff --git a/airflow-core/src/airflow/providers_manager.py b/airflow-core/src/airflow/providers_manager.py index f6e876e8914a9..757957ad5d039 100644 --- a/airflow-core/src/airflow/providers_manager.py +++ b/airflow-core/src/airflow/providers_manager.py @@ -26,12 +26,12 @@ import logging import traceback import warnings -from collections.abc import MutableMapping +from collections.abc import Callable, MutableMapping from dataclasses import dataclass from functools import wraps from importlib.resources import files as resource_files from time import perf_counter -from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar +from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar from packaging.utils import canonicalize_name diff --git a/airflow-core/src/airflow/serialization/serde.py b/airflow-core/src/airflow/serialization/serde.py index 0268ad91206d8..664a8d7cd5657 100644 --- a/airflow-core/src/airflow/serialization/serde.py +++ b/airflow-core/src/airflow/serialization/serde.py @@ -26,7 +26,7 @@ from fnmatch import fnmatch from importlib import import_module from re import Pattern -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast import attr @@ -56,8 +56,8 @@ DEFAULT_VERSION = 0 T = TypeVar("T", bool, float, int, dict, list, str, tuple, set) -U = Union[bool, float, int, dict, list, str, tuple, set] -S = Union[list, tuple, set] +U = bool | float | int | dict | list | str | tuple | set +S = list | tuple | set _serializers: dict[str, ModuleType] = {} _deserializers: dict[str, ModuleType] = {} diff --git a/airflow-core/src/airflow/serialization/serialized_objects.py b/airflow-core/src/airflow/serialization/serialized_objects.py index e102de1c9928c..33676d58bec06 100644 --- a/airflow-core/src/airflow/serialization/serialized_objects.py +++ b/airflow-core/src/airflow/serialization/serialized_objects.py @@ -520,20 +520,13 @@ def deref(self, dag: DAG) -> 
SchedulerXComArg: # the type declarations in expandinput.py so we always remember to update # serialization logic when adding new ExpandInput variants. If you add things to # the unions, be sure to update _ExpandInputRef to match. -_ExpandInputOriginalValue = Union[ - # For .expand(**kwargs). - Mapping[str, Any], - # For expand_kwargs(arg). - XComArg, - Collection[Union[XComArg, Mapping[str, Any]]], -] -_ExpandInputSerializedValue = Union[ - # For .expand(**kwargs). - Mapping[str, Any], - # For expand_kwargs(arg). - _XComRef, - Collection[Union[_XComRef, Mapping[str, Any]]], -] +# Mapping[str, Any], For .expand(**kwargs). +# XComArg # For expand_kwargs(arg). +_ExpandInputOriginalValue = Mapping[str, Any] | XComArg | Collection[XComArg | Mapping[str, Any]] + +# Mapping[str, Any], For .expand(**kwargs). +# _XComRef For expand_kwargs(arg). +_ExpandInputSerializedValue = Mapping[str, Any] | _XComRef | Collection[_XComRef | Mapping[str, Any]] class _ExpandInputRef(NamedTuple): @@ -751,7 +744,7 @@ def serialize( return cls._encode(var.timestamp(), type_=DAT.DATETIME) elif isinstance(var, datetime.timedelta): return cls._encode(var.total_seconds(), type_=DAT.TIMEDELTA) - elif isinstance(var, (Timezone, FixedTimezone)): + elif isinstance(var, Timezone | FixedTimezone): return cls._encode(encode_timezone(var), type_=DAT.TIMEZONE) elif isinstance(var, relativedelta.relativedelta): return cls._encode(encode_relativedelta(var), type_=DAT.RELATIVEDELTA) @@ -760,7 +753,7 @@ def serialize( var._asdict(), type_=DAT.TASK_INSTANCE_KEY, ) - elif isinstance(var, (AirflowException, TaskDeferred)) and hasattr(var, "serialize"): + elif isinstance(var, AirflowException | TaskDeferred) and hasattr(var, "serialize"): exc_cls_name, args, kwargs = var.serialize() return cls._encode( cls.serialize( @@ -769,7 +762,7 @@ def serialize( ), type_=DAT.AIRFLOW_EXC_SER, ) - elif isinstance(var, (KeyError, AttributeError)): + elif isinstance(var, KeyError | AttributeError): return cls._encode( cls.serialize( { diff --git a/airflow-core/src/airflow/serialization/serializers/datetime.py b/airflow-core/src/airflow/serialization/serializers/datetime.py index 69058b8c02a8b..a99c690f3cb72 100644 --- a/airflow-core/src/airflow/serialization/serializers/datetime.py +++ b/airflow-core/src/airflow/serialization/serializers/datetime.py @@ -92,7 +92,7 @@ def deserialize(classname: str, version: int, data: dict | str) -> datetime.date if classname == qualname(DateTime) and isinstance(data, dict): return DateTime.fromtimestamp(float(data[TIMESTAMP]), tz=tz) - if classname == qualname(datetime.timedelta) and isinstance(data, (str, float)): + if classname == qualname(datetime.timedelta) and isinstance(data, str | float): return datetime.timedelta(seconds=float(data)) if classname == qualname(datetime.date) and isinstance(data, str): diff --git a/airflow-core/src/airflow/serialization/serializers/kubernetes.py b/airflow-core/src/airflow/serialization/serializers/kubernetes.py index faa2312ac7a81..908bd0dc29e17 100644 --- a/airflow-core/src/airflow/serialization/serializers/kubernetes.py +++ b/airflow-core/src/airflow/serialization/serializers/kubernetes.py @@ -43,7 +43,7 @@ def serialize(o: object) -> tuple[U, str, int, bool]: if not k8s: return "", "", 0, False - if isinstance(o, (k8s.V1Pod, k8s.V1ResourceRequirements)): + if isinstance(o, k8s.V1Pod | k8s.V1ResourceRequirements): from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator # We're running this in an except block, so we don't want it to fail diff --git 
a/airflow-core/src/airflow/serialization/serializers/numpy.py b/airflow-core/src/airflow/serialization/serializers/numpy.py index c31244c587844..6e317b47d3885 100644 --- a/airflow-core/src/airflow/serialization/serializers/numpy.py +++ b/airflow-core/src/airflow/serialization/serializers/numpy.py @@ -55,26 +55,24 @@ def serialize(o: object) -> tuple[U, str, int, bool]: name = qualname(o) if isinstance( o, - ( - np.int_, - np.intc, - np.intp, - np.int8, - np.int16, - np.int32, - np.int64, - np.uint8, - np.uint16, - np.uint32, - np.uint64, - ), + np.int_ + | np.intc + | np.intp + | np.int8 + | np.int16 + | np.int32 + | np.int64 + | np.uint8 + | np.uint16 + | np.uint32 + | np.uint64, ): return int(o), name, __version__, True if isinstance(o, np.bool_): return bool(o), name, __version__, True - if isinstance(o, (np.float16, np.float32, np.float64, np.complex64, np.complex128)): + if isinstance(o, np.float16 | np.float32 | np.float64 | np.complex64 | np.complex128): return float(o), name, __version__, True return "", "", 0, False diff --git a/airflow-core/src/airflow/serialization/serializers/timezone.py b/airflow-core/src/airflow/serialization/serializers/timezone.py index 9f2ef7cef65ac..ca2f25ed59010 100644 --- a/airflow-core/src/airflow/serialization/serializers/timezone.py +++ b/airflow-core/src/airflow/serialization/serializers/timezone.py @@ -70,7 +70,7 @@ def serialize(o: object) -> tuple[U, str, int, bool]: def deserialize(classname: str, version: int, data: object) -> Any: from airflow.utils.timezone import parse_timezone - if not isinstance(data, (str, int)): + if not isinstance(data, str | int): raise TypeError(f"{data} is not of type int or str but of {type(data)}") if version > __version__: diff --git a/airflow-core/src/airflow/settings.py b/airflow-core/src/airflow/settings.py index 08d7ac7af5ba7..689ce2e4e6819 100644 --- a/airflow-core/src/airflow/settings.py +++ b/airflow-core/src/airflow/settings.py @@ -25,8 +25,9 @@ import platform import sys import warnings +from collections.abc import Callable from importlib import metadata -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any import pluggy from packaging.version import Version diff --git a/airflow-core/src/airflow/stats.py b/airflow-core/src/airflow/stats.py index 569bce480653c..6cb9229ab7388 100644 --- a/airflow-core/src/airflow/stats.py +++ b/airflow-core/src/airflow/stats.py @@ -19,7 +19,8 @@ import logging import socket -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.configuration import conf from airflow.metrics.base_stats_logger import NoStatsLogger diff --git a/airflow-core/src/airflow/timetables/interval.py b/airflow-core/src/airflow/timetables/interval.py index f0de30e76de6e..7e68b9a01e624 100644 --- a/airflow-core/src/airflow/timetables/interval.py +++ b/airflow-core/src/airflow/timetables/interval.py @@ -17,7 +17,7 @@ from __future__ import annotations import datetime -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any from dateutil.relativedelta import relativedelta from pendulum import DateTime @@ -30,7 +30,7 @@ if TYPE_CHECKING: from airflow.timetables.base import TimeRestriction -Delta = Union[datetime.timedelta, relativedelta] +Delta = datetime.timedelta | relativedelta class _DataIntervalTimetable(Timetable): diff --git a/airflow-core/src/airflow/traces/tracer.py b/airflow-core/src/airflow/traces/tracer.py index 45ed924deda85..7ac1ea3abeaab 100644 --- 
a/airflow-core/src/airflow/traces/tracer.py +++ b/airflow-core/src/airflow/traces/tracer.py @@ -19,8 +19,9 @@ import logging import socket +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, Protocol +from typing import TYPE_CHECKING, Any, Protocol from airflow.configuration import conf diff --git a/airflow-core/src/airflow/triggers/base.py b/airflow-core/src/airflow/triggers/base.py index 2dfe6880786f6..490423da5fda2 100644 --- a/airflow-core/src/airflow/triggers/base.py +++ b/airflow-core/src/airflow/triggers/base.py @@ -21,7 +21,7 @@ from collections.abc import AsyncIterator from dataclasses import dataclass from datetime import timedelta -from typing import Annotated, Any, Union +from typing import Annotated, Any import structlog from pydantic import ( @@ -229,11 +229,9 @@ def trigger_event_discriminator(v): DiscrimatedTriggerEvent = Annotated[ - Union[ - Annotated[TriggerEvent, Tag("_event_")], - Annotated[TaskSuccessEvent, Tag(TaskInstanceState.SUCCESS)], - Annotated[TaskFailedEvent, Tag(TaskInstanceState.FAILED)], - Annotated[TaskSkippedEvent, Tag(TaskInstanceState.SKIPPED)], - ], + Annotated[TriggerEvent, Tag("_event_")] + | Annotated[TaskSuccessEvent, Tag(TaskInstanceState.SUCCESS)] + | Annotated[TaskFailedEvent, Tag(TaskInstanceState.FAILED)] + | Annotated[TaskSkippedEvent, Tag(TaskInstanceState.SKIPPED)], Discriminator(trigger_event_discriminator), ] diff --git a/airflow-core/src/airflow/typing_compat.py b/airflow-core/src/airflow/typing_compat.py index 91a37ae020fec..283537e7e07b4 100644 --- a/airflow-core/src/airflow/typing_compat.py +++ b/airflow-core/src/airflow/typing_compat.py @@ -37,12 +37,9 @@ if sys.version_info >= (3, 10, 1) or (3, 9, 8) <= sys.version_info < (3, 10): from typing import Literal else: - from typing_extensions import Literal # type: ignore[assignment] + from typing import Literal # type: ignore[assignment] -if sys.version_info >= (3, 10): - from typing import ParamSpec, TypeAlias, TypeGuard -else: - from typing_extensions import ParamSpec, TypeAlias, TypeGuard +from typing import ParamSpec, TypeAlias, TypeGuard if sys.version_info >= (3, 11): from typing import Self diff --git a/airflow-core/src/airflow/utils/cli.py b/airflow-core/src/airflow/utils/cli.py index 8bde3f9abc8c0..ac7723d29338c 100644 --- a/airflow-core/src/airflow/utils/cli.py +++ b/airflow-core/src/airflow/utils/cli.py @@ -29,8 +29,9 @@ import traceback import warnings from argparse import Namespace +from collections.abc import Callable from pathlib import Path -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from airflow import settings from airflow.dag_processing.bundles.manager import DagBundlesManager diff --git a/airflow-core/src/airflow/utils/cli_action_loggers.py b/airflow-core/src/airflow/utils/cli_action_loggers.py index c43c6571ad58b..cd53f2c91a2dc 100644 --- a/airflow-core/src/airflow/utils/cli_action_loggers.py +++ b/airflow-core/src/airflow/utils/cli_action_loggers.py @@ -26,7 +26,8 @@ import json import logging -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.utils.session import NEW_SESSION, provide_session diff --git a/airflow-core/src/airflow/utils/db.py b/airflow-core/src/airflow/utils/db.py index 05315110e404d..ffbf85a2a8100 100644 --- a/airflow-core/src/airflow/utils/db.py +++ b/airflow-core/src/airflow/utils/db.py @@ -27,12 +27,11 @@ import sys import time import 
warnings -from collections.abc import Generator, Iterable, Iterator, Sequence +from collections.abc import Callable, Generator, Iterable, Iterator, Sequence from tempfile import gettempdir from typing import ( TYPE_CHECKING, Any, - Callable, Protocol, TypeVar, overload, diff --git a/airflow-core/src/airflow/utils/decorators.py b/airflow-core/src/airflow/utils/decorators.py index cedb7e532d7ea..8286872d3d264 100644 --- a/airflow-core/src/airflow/utils/decorators.py +++ b/airflow-core/src/airflow/utils/decorators.py @@ -18,7 +18,8 @@ from __future__ import annotations import sys -from typing import Callable, TypeVar +from collections.abc import Callable +from typing import TypeVar import libcst as cst diff --git a/airflow-core/src/airflow/utils/dot_renderer.py b/airflow-core/src/airflow/utils/dot_renderer.py index fc1685b68b61f..f54ea23935637 100644 --- a/airflow-core/src/airflow/utils/dot_renderer.py +++ b/airflow-core/src/airflow/utils/dot_renderer.py @@ -134,7 +134,7 @@ def _draw_nodes( node: DependencyMixin, parent_graph: graphviz.Digraph, states_by_task_id: dict[str, str] | None ) -> None: """Draw the node and its children on the given parent_graph recursively.""" - if isinstance(node, (BaseOperator, MappedOperator)): + if isinstance(node, BaseOperator | MappedOperator): _draw_task(node, parent_graph, states_by_task_id) else: if not isinstance(node, TaskGroup): diff --git a/airflow-core/src/airflow/utils/event_scheduler.py b/airflow-core/src/airflow/utils/event_scheduler.py index a1dff1078faed..88999ec69372f 100644 --- a/airflow-core/src/airflow/utils/event_scheduler.py +++ b/airflow-core/src/airflow/utils/event_scheduler.py @@ -17,8 +17,8 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from sched import scheduler -from typing import Callable from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow-core/src/airflow/utils/helpers.py b/airflow-core/src/airflow/utils/helpers.py index 74b7dacd33de7..aa111cf07733b 100644 --- a/airflow-core/src/airflow/utils/helpers.py +++ b/airflow-core/src/airflow/utils/helpers.py @@ -21,9 +21,9 @@ import itertools import re import signal -from collections.abc import Generator, Iterable, Mapping, MutableMapping +from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping from functools import cache -from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from urllib.parse import urljoin from lazy_object_proxy import Proxy @@ -300,7 +300,7 @@ def is_empty(x): for k, v in val.items(): if is_empty(v): continue - if isinstance(v, (list, dict)): + if isinstance(v, list | dict): new_val = prune_dict(v, mode=mode) if not is_empty(new_val): new_dict[k] = new_val @@ -312,7 +312,7 @@ def is_empty(x): for v in val: if is_empty(v): continue - if isinstance(v, (list, dict)): + if isinstance(v, list | dict): new_val = prune_dict(v, mode=mode) if not is_empty(new_val): new_list.append(new_val) diff --git a/airflow-core/src/airflow/utils/log/colored_log.py b/airflow-core/src/airflow/utils/log/colored_log.py index bd9763ce4553a..fa3c9d52c2609 100644 --- a/airflow-core/src/airflow/utils/log/colored_log.py +++ b/airflow-core/src/airflow/utils/log/colored_log.py @@ -58,7 +58,7 @@ def __init__(self, *args, **kwargs): @staticmethod def _color_arg(arg: Any) -> str | float | int: - if isinstance(arg, (int, float)): + if isinstance(arg, int | float): # In case of %d or %f formatting return arg return BOLD_ON + str(arg) + BOLD_OFF 
@@ -69,7 +69,7 @@ def _count_number_of_arguments_in_message(record: LogRecord) -> int: return len(matches) if matches else 0 def _color_record_args(self, record: LogRecord) -> LogRecord: - if isinstance(record.args, (tuple, list)): + if isinstance(record.args, tuple | list): record.args = tuple(self._color_arg(arg) for arg in record.args) elif isinstance(record.args, dict): if self._count_number_of_arguments_in_message(record) > 1: diff --git a/airflow-core/src/airflow/utils/log/file_task_handler.py b/airflow-core/src/airflow/utils/log/file_task_handler.py index 15339c00cbb69..84987d1a20110 100644 --- a/airflow-core/src/airflow/utils/log/file_task_handler.py +++ b/airflow-core/src/airflow/utils/log/file_task_handler.py @@ -22,12 +22,12 @@ import itertools import logging import os -from collections.abc import Iterable +from collections.abc import Callable, Iterable from contextlib import suppress from datetime import datetime from enum import Enum from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Union +from typing import TYPE_CHECKING, Any from urllib.parse import urljoin import pendulum @@ -49,7 +49,7 @@ # These types are similar, but have distinct names to make processing them less error prone -LogMessages: TypeAlias = Union[list["StructuredLogMessage"], list[str]] +LogMessages: TypeAlias = list["StructuredLogMessage"] | list[str] """The log messages themselves, either in already sturcutured form, or a single string blob to be parsed later""" LogSourceInfo: TypeAlias = list[str] """Information _about_ the log fetching process for display to a user""" @@ -149,7 +149,7 @@ def _parse_log_lines( lines = itertools.chain.from_iterable(map(str.splitlines, lines)) # type: ignore[assignment,arg-type] # https://github.com/python/mypy/issues/8586 - for idx, line in enumerate[Union[str, StructuredLogMessage]](lines): + for idx, line in enumerate[str | StructuredLogMessage](lines): if line: try: if isinstance(line, StructuredLogMessage): diff --git a/airflow-core/src/airflow/utils/log/log_reader.py b/airflow-core/src/airflow/utils/log/log_reader.py index 4a36b3ef1b07e..6ce10e387e52b 100644 --- a/airflow-core/src/airflow/utils/log/log_reader.py +++ b/airflow-core/src/airflow/utils/log/log_reader.py @@ -20,7 +20,7 @@ import time from collections.abc import Iterator from functools import cached_property -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any from airflow.configuration import conf from airflow.utils.helpers import render_log_filename @@ -36,7 +36,7 @@ from airflow.models.taskinstancehistory import TaskInstanceHistory from airflow.typing_compat import TypeAlias -LogMessages: TypeAlias = Union[list[StructuredLogMessage], str] +LogMessages: TypeAlias = list[StructuredLogMessage] | str LogMetadata: TypeAlias = dict[str, Any] diff --git a/airflow-core/src/airflow/utils/module_loading.py b/airflow-core/src/airflow/utils/module_loading.py index 42a078f3437d8..49f9db05c9df8 100644 --- a/airflow-core/src/airflow/utils/module_loading.py +++ b/airflow-core/src/airflow/utils/module_loading.py @@ -19,8 +19,9 @@ import pkgutil import re +from collections.abc import Callable from importlib import import_module -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING if TYPE_CHECKING: from types import ModuleType diff --git a/airflow-core/src/airflow/utils/operator_helpers.py b/airflow-core/src/airflow/utils/operator_helpers.py index bf340c1ace9bc..8b8d339b15790 100644 --- a/airflow-core/src/airflow/utils/operator_helpers.py +++ 
b/airflow-core/src/airflow/utils/operator_helpers.py @@ -18,8 +18,8 @@ from __future__ import annotations import inspect -from collections.abc import Collection, Mapping -from typing import Any, Callable, TypeVar +from collections.abc import Callable, Collection, Mapping +from typing import Any, TypeVar R = TypeVar("R") diff --git a/airflow-core/src/airflow/utils/providers_configuration_loader.py b/airflow-core/src/airflow/utils/providers_configuration_loader.py index 4aeb93edcb39a..4fd9243235de2 100644 --- a/airflow-core/src/airflow/utils/providers_configuration_loader.py +++ b/airflow-core/src/airflow/utils/providers_configuration_loader.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from functools import wraps -from typing import Callable, TypeVar +from typing import TypeVar from airflow.typing_compat import ParamSpec diff --git a/airflow-core/src/airflow/utils/retries.py b/airflow-core/src/airflow/utils/retries.py index e885eaededcc4..4428db2a49382 100644 --- a/airflow-core/src/airflow/utils/retries.py +++ b/airflow-core/src/airflow/utils/retries.py @@ -18,8 +18,9 @@ import functools import logging +from collections.abc import Callable from inspect import signature -from typing import Callable, TypeVar, overload +from typing import TypeVar, overload from sqlalchemy.exc import DBAPIError diff --git a/airflow-core/src/airflow/utils/session.py b/airflow-core/src/airflow/utils/session.py index e6b04f06461de..ec0c2ac891f30 100644 --- a/airflow-core/src/airflow/utils/session.py +++ b/airflow-core/src/airflow/utils/session.py @@ -17,10 +17,10 @@ from __future__ import annotations import contextlib -from collections.abc import Generator +from collections.abc import Callable, Generator from functools import wraps from inspect import signature -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from airflow import settings from airflow.typing_compat import ParamSpec diff --git a/airflow-core/src/airflow/utils/setup_teardown.py b/airflow-core/src/airflow/utils/setup_teardown.py index b62e3ab298b40..d2273c0c3f705 100644 --- a/airflow-core/src/airflow/utils/setup_teardown.py +++ b/airflow-core/src/airflow/utils/setup_teardown.py @@ -114,7 +114,7 @@ def set_dependency( new_task: AbstractOperator | list[AbstractOperator], upstream=True, ): - if isinstance(new_task, (list, tuple)): + if isinstance(new_task, list | tuple): for task in new_task: cls._set_dependency(task, receiving_task, upstream) else: diff --git a/airflow-core/src/airflow/utils/sqlalchemy.py b/airflow-core/src/airflow/utils/sqlalchemy.py index 9e8cf12fba26c..8311547e7e998 100644 --- a/airflow-core/src/airflow/utils/sqlalchemy.py +++ b/airflow-core/src/airflow/utils/sqlalchemy.py @@ -163,13 +163,13 @@ def sanitize_for_serialization(obj: V1Pod): """ if obj is None: return None - if isinstance(obj, (float, bool, bytes, str, int)): + if isinstance(obj, float | bool | bytes | str | int): return obj if isinstance(obj, list): return [sanitize_for_serialization(sub_obj) for sub_obj in obj] if isinstance(obj, tuple): return tuple(sanitize_for_serialization(sub_obj) for sub_obj in obj) - if isinstance(obj, (datetime.datetime, datetime.date)): + if isinstance(obj, datetime.datetime | datetime.date): return obj.isoformat() if isinstance(obj, dict): diff --git a/airflow-core/src/airflow/utils/task_group.py b/airflow-core/src/airflow/utils/task_group.py index bcae2baa88d6d..3eaf7cdf2d034 100644 --- 
a/airflow-core/src/airflow/utils/task_group.py +++ b/airflow-core/src/airflow/utils/task_group.py @@ -19,9 +19,10 @@ from __future__ import annotations +from collections.abc import Callable from functools import cache from operator import methodcaller -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING import airflow.sdk.definitions.taskgroup from airflow.configuration import conf diff --git a/airflow-core/tests/unit/always/test_project_structure.py b/airflow-core/tests/unit/always/test_project_structure.py index d26565456a9da..1f55eac96be6e 100644 --- a/airflow-core/tests/unit/always/test_project_structure.py +++ b/airflow-core/tests/unit/always/test_project_structure.py @@ -304,7 +304,7 @@ def get_imports_from_file(filepath: str): doc_node = ast.parse(content, filepath) import_names: set[str] = set() for current_node in ast.walk(doc_node): - if not isinstance(current_node, (ast.Import, ast.ImportFrom)): + if not isinstance(current_node, ast.Import | ast.ImportFrom): continue for alias in current_node.names: name = alias.name diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_variables.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_variables.py index b1270b7365efc..34874c1fcf13f 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_variables.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_variables.py @@ -1095,7 +1095,7 @@ def test_bulk_create_entity_serialization( response = test_client.patch("/variables", json=actions) assert response.status_code == 200 - if isinstance(entity_value, (dict, list)): + if isinstance(entity_value, dict | list): retrieved_value_deserialized = Variable.get(entity_key, deserialize_json=True) assert retrieved_value_deserialized == entity_value retrieved_value_raw_string = Variable.get(entity_key, deserialize_json=False) diff --git a/airflow-core/tests/unit/cluster_policies/__init__.py b/airflow-core/tests/unit/cluster_policies/__init__.py index a5fb9cdb3f072..9e84386743ad9 100644 --- a/airflow-core/tests/unit/cluster_policies/__init__.py +++ b/airflow-core/tests/unit/cluster_policies/__init__.py @@ -18,8 +18,9 @@ from __future__ import annotations from abc import ABC +from collections.abc import Callable from datetime import timedelta -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING from airflow.configuration import conf from airflow.exceptions import AirflowClusterPolicySkipDag, AirflowClusterPolicyViolation diff --git a/airflow-core/tests/unit/dag_processing/test_processor.py b/airflow-core/tests/unit/dag_processing/test_processor.py index 8d77da61cafb9..d4351039db186 100644 --- a/airflow-core/tests/unit/dag_processing/test_processor.py +++ b/airflow-core/tests/unit/dag_processing/test_processor.py @@ -21,8 +21,9 @@ import pathlib import sys import textwrap +from collections.abc import Callable from socket import socketpair -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING from unittest.mock import MagicMock, patch import pytest diff --git a/airflow-core/tests/unit/serialization/test_dag_serialization.py b/airflow-core/tests/unit/serialization/test_dag_serialization.py index 1a3b55f0a892e..e78c40d3797d4 100644 --- a/airflow-core/tests/unit/serialization/test_dag_serialization.py +++ b/airflow-core/tests/unit/serialization/test_dag_serialization.py @@ -404,7 +404,7 @@ def collect_dags(dag_folder=None): "providers/*/*/tests/system/*/*/", ] else: - if isinstance(dag_folder, (list, 
tuple)): + if isinstance(dag_folder, list | tuple): patterns = dag_folder else: patterns = [dag_folder] @@ -723,7 +723,7 @@ def validate_deserialized_task( from airflow.sdk.definitions.mappedoperator import MappedOperator assert not isinstance(task, SerializedBaseOperator) - assert isinstance(task, (BaseOperator, MappedOperator)) + assert isinstance(task, BaseOperator | MappedOperator) # Every task should have a task_group property -- even if it's the DAG's root task group assert serialized_task.task_group diff --git a/airflow-ctl/docs/images/command_hashes.txt b/airflow-ctl/docs/images/command_hashes.txt index 9183296518702..73316282d987d 100644 --- a/airflow-ctl/docs/images/command_hashes.txt +++ b/airflow-ctl/docs/images/command_hashes.txt @@ -1,14 +1,14 @@ -main:649cfb54e45b91a69ad7ed19cb526e93 -assets:bd74e73e54641bac100b88ca29641df2 -auth:ef4122d3f5e4b2ac19cb0d3e12c8594b -backfills:e0cba4448d576d1b53ea79d6dcdbe035 -config:807fd4874d29702624b231a1e4ea0bc9 -connections:da4f6807ca2a265ed6d6e734b5355fe2 -dag:dab7c8aa1a62fa011b80bb7132bcc32a -dagrun:7b3e06a3664cc7ceb18457b4c0895532 -jobs:806174e6c9511db669705279ed6a00b9 -pools:2c17a4131b6481bd8fe9120982606db2 -providers:d053e6f17ff271e1e08942378344d27b -variables:cd3970589b2cb1e3ebd9a0b7f2ffdf4d -version:19f901e228111d8ba2ef47d8722f9b87 -auth login:348c25d49128b6007ac97dae2ef7563f +main:8c61e3718e03aca03d819dce9973d56d +assets:6368874514661d8ef3d58e3b925e4ec1 +auth:caba04038826ae684922b9a7c42fc523 +backfills:10f1ac9cf1aa90d47006f23fc73cef38 +config:30215b244ab1e713df78813011449122 +connections:7f251b3b6005583c685218a264b47587 +dag:74332efb98c229cc90de9aeb1e87d58e +dagrun:f4ca499bcfc98265c065d06bf685ce87 +jobs:3675ee646892500ea45b0a17bf52ca07 +pools:5b1694055cea03e20468258200a18212 +providers:beb417fce0c8a6cafcdab36e0e692579 +variables:0d276ede752bad613823821ec2a2d7cb +version:dd32fbd1740003c2aaa30cd6a201dacd +auth login:8bcfaaf886b64c3d2e81d7e1c767b4b2 diff --git a/airflow-ctl/docs/images/output_assets.svg b/airflow-ctl/docs/images/output_assets.svg index 6ede29cb7c855..a680d89a96005 100644 --- a/airflow-ctl/docs/images/output_assets.svg +++ b/airflow-ctl/docs/images/output_assets.svg @@ -19,139 +19,139 @@ font-weight: 700; } - .terminal-2173172358-matrix { + .terminal-2173367894-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2173172358-title { + .terminal-2173367894-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2173172358-r1 { fill: #c5c8c6 } + .terminal-2173367894-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: assets + Command: assets - + - - Usage: airflowctl assets [-h] COMMAND ... 
- -Perform Assets operations - -Positional Arguments: -  COMMAND -    create-event        Perform create_event operation -    delete-dag-queued-events -                        Perform delete_dag_queued_events operation -    delete-queued-event -                        Perform delete_queued_event operation -    delete-queued-events -                        Perform delete_queued_events operation -    get                 Perform get operation -    get-by-alias        Perform get_by_alias operation -    get-dag-queued-event -                        Perform get_dag_queued_event operation -    get-dag-queued-events -                        Perform get_dag_queued_events operation -    get-queued-events   Perform get_queued_events operation -    list                Perform list operation -    list-by-alias       Perform list_by_alias operation -    materialize         Perform materialize operation - -Optional Arguments: -  -h, --help            show this help message and exit + + Usage: airflowctl assets [-h] COMMAND ... + +Perform Assets operations + +Positional Arguments: +  COMMAND +    create-event        Perform create_event operation +    delete-dag-queued-events +                        Perform delete_dag_queued_events operation +    delete-queued-event +                        Perform delete_queued_event operation +    delete-queued-events +                        Perform delete_queued_events operation +    get                 Perform get operation +    get-by-alias        Perform get_by_alias operation +    get-dag-queued-event +                        Perform get_dag_queued_event operation +    get-dag-queued-events +                        Perform get_dag_queued_events operation +    get-queued-events   Perform get_queued_events operation +    list                Perform list operation +    list-by-alias       Perform list_by_alias operation +    materialize         Perform materialize operation + +Options: +  -h, --help            show this help message and exit diff --git a/airflow-ctl/docs/images/output_auth.svg b/airflow-ctl/docs/images/output_auth.svg index 6667db945c139..87dda63d8376d 100644 --- a/airflow-ctl/docs/images/output_auth.svg +++ b/airflow-ctl/docs/images/output_auth.svg @@ -19,83 +19,83 @@ font-weight: 700; } - .terminal-337171281-matrix { + .terminal-3729182497-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-337171281-title { + .terminal-3729182497-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-337171281-r1 { fill: #c5c8c6 } + .terminal-3729182497-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - Command: auth + Command: auth - + - - Usage: airflowctl auth [-h] COMMAND ... - -Manage authentication for CLI. Either pass token from environment  -variable/parameter or pass username and password. - -Positional Arguments: -  COMMAND -    login     Login to the metadata database for personal usage. JWT Token must  -be provided via parameter. - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl auth [-h] COMMAND ... + +Manage authentication for CLI. Either pass token from environment  +variable/parameter or pass username and password. + +Positional Arguments: +  COMMAND +    login     Login to the metadata database for personal usage. JWT Token must  +be provided via parameter. 
+ +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_auth_login.svg b/airflow-ctl/docs/images/output_auth_login.svg index 5c39720038046..d023348b4b544 100644 --- a/airflow-ctl/docs/images/output_auth_login.svg +++ b/airflow-ctl/docs/images/output_auth_login.svg @@ -19,95 +19,95 @@ font-weight: 700; } - .terminal-493302485-matrix { + .terminal-452800165-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-493302485-title { + .terminal-452800165-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-493302485-r1 { fill: #c5c8c6 } + .terminal-452800165-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: auth login + Command: auth login - + - - Usage: airflowctl auth login [-h] [--api-token API_TOKEN] [--api-url API_URL] -                             [-e ENV] [--password [PASSWORD]] -                             [--username USERNAME] - -Login to the metadata database - -Optional Arguments: -  -h, --help            show this help message and exit -  --api-token API_TOKEN -                        The token to use for authentication -  --api-url API_URL     The URL of the metadata database API -  -e, --env ENV         The environment to run the command in -  --password [PASSWORD] -                        The password to use for authentication -  --username USERNAME   The username to use for authentication + + Usage: airflowctl auth login [-h] [--api-token API_TOKEN] [--api-url API_URL] +                             [-e ENV] [--password [PASSWORD]] +                             [--username USERNAME] + +Login to the metadata database + +Options: +  -h, --help            show this help message and exit +  --api-token API_TOKEN +                        The token to use for authentication +  --api-url API_URL     The URL of the metadata database API +  -e, --env ENV         The environment to run the command in +  --password [PASSWORD] +                        The password to use for authentication +  --username USERNAME   The username to use for authentication diff --git a/airflow-ctl/docs/images/output_backfills.svg b/airflow-ctl/docs/images/output_backfills.svg index 773141475e3ae..dd590f32527e5 100644 --- a/airflow-ctl/docs/images/output_backfills.svg +++ b/airflow-ctl/docs/images/output_backfills.svg @@ -19,103 +19,103 @@ font-weight: 700; } - .terminal-962206658-matrix { + .terminal-3702003602-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-962206658-title { + .terminal-3702003602-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-962206658-r1 { fill: #c5c8c6 } + .terminal-3702003602-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: backfills + Command: backfills - + - - Usage: airflowctl backfills [-h] COMMAND ... - -Perform Backfills operations - -Positional Arguments: -  COMMAND -    cancel        Perform cancel operation -    create        Perform create operation -    create-dry-run -                  Perform create_dry_run operation -    get           Perform get operation -    list          Perform list operation -    pause         Perform pause operation -    unpause       Perform unpause operation - -Optional Arguments: -  -h, --help      show this help message and exit + + Usage: airflowctl backfills [-h] COMMAND ... 
+ +Perform Backfills operations + +Positional Arguments: +  COMMAND +    cancel        Perform cancel operation +    create        Perform create operation +    create-dry-run +                  Perform create_dry_run operation +    get           Perform get operation +    list          Perform list operation +    pause         Perform pause operation +    unpause       Perform unpause operation + +Options: +  -h, --help      show this help message and exit diff --git a/airflow-ctl/docs/images/output_config.svg b/airflow-ctl/docs/images/output_config.svg index 152fd39a10f10..e993040457df5 100644 --- a/airflow-ctl/docs/images/output_config.svg +++ b/airflow-ctl/docs/images/output_config.svg @@ -19,85 +19,87 @@ font-weight: 700; } - .terminal-2772488152-matrix { + .terminal-2512178088-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2772488152-title { + .terminal-2512178088-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2772488152-r1 { fill: #c5c8c6 } + .terminal-2512178088-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: config + Command: config - + - - Usage: airflowctl config [-h] COMMAND ... - -Perform Config operations - -Positional Arguments: -  COMMAND -    get       Perform get operation -    list      Perform list operation - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl config [-h] COMMAND ... + +Perform Config operations + +Positional Arguments: +  COMMAND +    get       Perform get operation +    lint      Lint options for the configuration changes while migrating from  +Airflow 2 to Airflow 3 +    list      Perform list operation + +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_connections.svg b/airflow-ctl/docs/images/output_connections.svg index 7d9c884d7a1dc..952b010c12e39 100644 --- a/airflow-ctl/docs/images/output_connections.svg +++ b/airflow-ctl/docs/images/output_connections.svg @@ -19,103 +19,103 @@ font-weight: 700; } - .terminal-2751209256-matrix { + .terminal-560929528-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2751209256-title { + .terminal-560929528-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2751209256-r1 { fill: #c5c8c6 } + .terminal-560929528-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: connections + Command: connections - + - - Usage: airflowctl connections [-h] COMMAND ... - -Perform Connections operations - -Positional Arguments: -  COMMAND -    create         Perform create operation -    create-defaults -                   Perform create_defaults operation -    delete         Perform delete operation -    get            Perform get operation -    list           Perform list operation -    test           Perform test operation -    update         Perform update operation - -Optional Arguments: -  -h, --help       show this help message and exit + + Usage: airflowctl connections [-h] COMMAND ... 
+ +Perform Connections operations + +Positional Arguments: +  COMMAND +    create         Perform create operation +    create-defaults +                   Perform create_defaults operation +    delete         Perform delete operation +    get            Perform get operation +    list           Perform list operation +    test           Perform test operation +    update         Perform update operation + +Options: +  -h, --help       show this help message and exit diff --git a/airflow-ctl/docs/images/output_dag.svg b/airflow-ctl/docs/images/output_dag.svg index ece942cf11b91..96a2c849ddec5 100644 --- a/airflow-ctl/docs/images/output_dag.svg +++ b/airflow-ctl/docs/images/output_dag.svg @@ -19,127 +19,127 @@ font-weight: 700; } - .terminal-3760826401-matrix { + .terminal-1884464113-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3760826401-title { + .terminal-1884464113-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3760826401-r1 { fill: #c5c8c6 } + .terminal-1884464113-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: dag + Command: dag - + - - Usage: airflowctl dag [-h] COMMAND ... - -Perform Dag operations - -Positional Arguments: -  COMMAND -    delete           Perform delete operation -    get              Perform get operation -    get-details      Perform get_details operation -    get-import-error -                     Perform get_import_error operation -    get-stats        Perform get_stats operation -    get-tags         Perform get_tags operation -    get-version      Perform get_version operation -    list             Perform list operation -    list-import-error -                     Perform list_import_error operation -    list-version     Perform list_version operation -    list-warning     Perform list_warning operation -    patch            Perform patch operation - -Optional Arguments: -  -h, --help         show this help message and exit + + Usage: airflowctl dag [-h] COMMAND ... 
+ +Perform Dag operations + +Positional Arguments: +  COMMAND +    delete           Perform delete operation +    get              Perform get operation +    get-details      Perform get_details operation +    get-import-error +                     Perform get_import_error operation +    get-stats        Perform get_stats operation +    get-tags         Perform get_tags operation +    get-version      Perform get_version operation +    list             Perform list operation +    list-import-error +                     Perform list_import_error operation +    list-version     Perform list_version operation +    list-warning     Perform list_warning operation +    patch            Perform patch operation + +Options: +  -h, --help         show this help message and exit diff --git a/airflow-ctl/docs/images/output_dagrun.svg b/airflow-ctl/docs/images/output_dagrun.svg index 20792fc3cab65..cd3bc15880d40 100644 --- a/airflow-ctl/docs/images/output_dagrun.svg +++ b/airflow-ctl/docs/images/output_dagrun.svg @@ -19,83 +19,83 @@ font-weight: 700; } - .terminal-668054125-matrix { + .terminal-1550757437-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-668054125-title { + .terminal-1550757437-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-668054125-r1 { fill: #c5c8c6 } + .terminal-1550757437-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - Command: dagrun + Command: dagrun - + - - Usage: airflowctl dagrun [-h] COMMAND ... - -Perform DagRun operations - -Positional Arguments: -  COMMAND -    create    Perform create operation -    get       Perform get operation -    list      Perform list operation - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl dagrun [-h] COMMAND ... + +Perform DagRun operations + +Positional Arguments: +  COMMAND +    create    Perform create operation +    get       Perform get operation +    list      Perform list operation + +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_jobs.svg b/airflow-ctl/docs/images/output_jobs.svg index 619d18fec1289..6d9504a90cbc4 100644 --- a/airflow-ctl/docs/images/output_jobs.svg +++ b/airflow-ctl/docs/images/output_jobs.svg @@ -19,75 +19,75 @@ font-weight: 700; } - .terminal-2231666968-matrix { + .terminal-2142602472-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2231666968-title { + .terminal-2142602472-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2231666968-r1 { fill: #c5c8c6 } + .terminal-2142602472-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - Command: jobs + Command: jobs - + - - Usage: airflowctl jobs [-h] COMMAND ... - -Perform Jobs operations - -Positional Arguments: -  COMMAND -    list      Perform list operation - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl jobs [-h] COMMAND ... 
+ +Perform Jobs operations + +Positional Arguments: +  COMMAND +    list      Perform list operation + +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_main.svg b/airflow-ctl/docs/images/output_main.svg index ec042419aaf9a..ede79c291896c 100644 --- a/airflow-ctl/docs/images/output_main.svg +++ b/airflow-ctl/docs/images/output_main.svg @@ -19,135 +19,135 @@ font-weight: 700; } - .terminal-2627781444-matrix { + .terminal-101498644-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2627781444-title { + .terminal-101498644-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2627781444-r1 { fill: #c5c8c6 } + .terminal-101498644-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: main + Command: main - + - - Usage: airflowctl [-h] GROUP_OR_COMMAND ... - -Positional Arguments: -  GROUP_OR_COMMAND - -    Groups -      assets        Perform Assets operations -      auth          Manage authentication for CLI. Either pass token from -                    environment variable/parameter or pass username and -                    password. -      backfills     Perform Backfills operations -      config        Perform Config operations -      connections   Perform Connections operations -      dag           Perform Dag operations -      dagrun        Perform DagRun operations -      jobs          Perform Jobs operations -      pools         Perform Pools operations -      providers     Perform Providers operations -      variables     Perform Variables operations - -    Commands: -      version       Show version information - -Optional Arguments: -  -h, --help        show this help message and exit + + Usage: airflowctl [-h] GROUP_OR_COMMAND ... + +Positional Arguments: +  GROUP_OR_COMMAND + +    Groups +      assets        Perform Assets operations +      auth          Manage authentication for CLI. Either pass token from +                    environment variable/parameter or pass username and +                    password. +      backfills     Perform Backfills operations +      config        Perform Config operations +      connections   Perform Connections operations +      dag           Perform Dag operations +      dagrun        Perform DagRun operations +      jobs          Perform Jobs operations +      pools         Perform Pools operations +      providers     Perform Providers operations +      variables     Perform Variables operations + +    Commands: +      version       Show version information + +Options: +  -h, --help        show this help message and exit diff --git a/airflow-ctl/docs/images/output_pools.svg b/airflow-ctl/docs/images/output_pools.svg index 7a5405f2b4e6c..0772aef116f5c 100644 --- a/airflow-ctl/docs/images/output_pools.svg +++ b/airflow-ctl/docs/images/output_pools.svg @@ -19,99 +19,99 @@ font-weight: 700; } - .terminal-3461610554-matrix { + .terminal-3616077834-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3461610554-title { + .terminal-3616077834-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3461610554-r1 { fill: #c5c8c6 } + .terminal-3616077834-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: pools + Command: pools - + - - Usage: airflowctl pools [-h] COMMAND ... 
- -Perform Pools operations - -Positional Arguments: -  COMMAND -    create    Perform create operation -    delete    Perform delete operation -    export    Export all pools -    get       Perform get operation -    import    Import pools -    list      Perform list operation -    update    Perform update operation - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl pools [-h] COMMAND ... + +Perform Pools operations + +Positional Arguments: +  COMMAND +    create    Perform create operation +    delete    Perform delete operation +    export    Export all pools +    get       Perform get operation +    import    Import pools +    list      Perform list operation +    update    Perform update operation + +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_providers.svg b/airflow-ctl/docs/images/output_providers.svg index 6771a1cb0b656..a64c52f809250 100644 --- a/airflow-ctl/docs/images/output_providers.svg +++ b/airflow-ctl/docs/images/output_providers.svg @@ -19,75 +19,75 @@ font-weight: 700; } - .terminal-888115112-matrix { + .terminal-3934358392-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-888115112-title { + .terminal-3934358392-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-888115112-r1 { fill: #c5c8c6 } + .terminal-3934358392-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - Command: providers + Command: providers - + - - Usage: airflowctl providers [-h] COMMAND ... - -Perform Providers operations - -Positional Arguments: -  COMMAND -    list      Perform list operation - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl providers [-h] COMMAND ... + +Perform Providers operations + +Positional Arguments: +  COMMAND +    list      Perform list operation + +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_variables.svg b/airflow-ctl/docs/images/output_variables.svg index 4baba40504240..864e7d63630e5 100644 --- a/airflow-ctl/docs/images/output_variables.svg +++ b/airflow-ctl/docs/images/output_variables.svg @@ -19,99 +19,99 @@ font-weight: 700; } - .terminal-2916680709-matrix { + .terminal-1648295878-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2916680709-title { + .terminal-1648295878-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2916680709-r1 { fill: #c5c8c6 } + .terminal-1648295878-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: variables + Command: variables - + - - Usage: airflowctl variables [-h] COMMAND ... - -Perform Variables operations - -Positional Arguments: -  COMMAND -    create    Perform create operation -    delete    Perform delete operation -    export    Export all variables -    get       Perform get operation -    import    Import variables -    list      Perform list operation -    update    Perform update operation - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl variables [-h] COMMAND ... 
+ +Perform Variables operations + +Positional Arguments: +  COMMAND +    create    Perform create operation +    delete    Perform delete operation +    export    Export all variables +    get       Perform get operation +    import    Import variables +    list      Perform list operation +    update    Perform update operation + +Options: +  -h, --help  show this help message and exit diff --git a/airflow-ctl/docs/images/output_version.svg b/airflow-ctl/docs/images/output_version.svg index 511817f589a10..c4ac023c6a1f5 100644 --- a/airflow-ctl/docs/images/output_version.svg +++ b/airflow-ctl/docs/images/output_version.svg @@ -19,59 +19,59 @@ font-weight: 700; } - .terminal-2624149572-matrix { + .terminal-2995541012-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2624149572-title { + .terminal-2995541012-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2624149572-r1 { fill: #c5c8c6 } + .terminal-2995541012-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - Command: version + Command: version - + - - Usage: airflowctl version [-h] - -Show version information - -Optional Arguments: -  -h, --help  show this help message and exit + + Usage: airflowctl version [-h] + +Show version information + +Options: +  -h, --help  show this help message and exit diff --git a/chart/pyproject.toml b/chart/pyproject.toml index 30813c38db28c..2c559d1997dc8 100644 --- a/chart/pyproject.toml +++ b/chart/pyproject.toml @@ -24,7 +24,7 @@ build-backend = "hatchling.build" [project] name = "apache-airflow-helm-chart" description = "Programmatically author, schedule and monitor data pipelines" -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index a92844c864bee..2cfdeeaad749a 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -25,7 +25,7 @@ dynamic = ["version"] description = "Apache Airflow API (Stable)" readme = "README.md" license-files.globs = ["LICENSE", "NOTICE"] -requires-python = "~=3.9" +requires-python = "~=3.10" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/contributing-docs/13_airflow_dependencies_and_extras.rst b/contributing-docs/13_airflow_dependencies_and_extras.rst index 9ac355f6c11c7..2e1b72c6f6533 100644 --- a/contributing-docs/13_airflow_dependencies_and_extras.rst +++ b/contributing-docs/13_airflow_dependencies_and_extras.rst @@ -85,8 +85,8 @@ from the PyPI package: .. code-block:: bash - pip install "apache-airflow[google,amazon,async]==2.2.5" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.9.txt" + pip install "apache-airflow[google,amazon,async]==3.0.0" \ + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.0/constraints-3.10.txt" The last one can be used to install Airflow in "minimal" mode - i.e when bare Airflow is installed without extras. @@ -98,7 +98,7 @@ requirements). .. 
code-block:: bash pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.10.txt" This also works with extras - for example: @@ -106,7 +106,7 @@ This also works with extras - for example: .. code-block:: bash pip install ".[ssh]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.10.txt" There are different set of fixed constraint files for different python major/minor versions and you should @@ -118,7 +118,7 @@ using ``constraints-no-providers`` constraint files as well. .. code-block:: bash pip install . --upgrade \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" The ``constraints-.txt`` and ``constraints-no-providers-.txt`` diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index a56177b937c39..ba1066d02d12a 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -912,7 +912,7 @@ the older branches, you should set the "skip" field to true. ## Verify production images ```shell script -for PYTHON in 3.9 3.10 3.11 3.12 +for PYTHON in 3.10 3.11 3.12 do docker pull apache/airflow:${VERSION}-python${PYTHON} breeze prod-image verify --image-name apache/airflow:${VERSION}-python${PYTHON} diff --git a/dev/airflow_mypy/plugin/outputs.py b/dev/airflow_mypy/plugin/outputs.py index ea7919147430d..f109401b46f23 100644 --- a/dev/airflow_mypy/plugin/outputs.py +++ b/dev/airflow_mypy/plugin/outputs.py @@ -17,7 +17,7 @@ # under the License. from __future__ import annotations -from typing import Callable +from collections.abc import Callable from mypy.plugin import AttributeContext, MethodContext, Plugin from mypy.types import AnyType, Type, TypeOfAny diff --git a/dev/breeze/README.md b/dev/breeze/README.md index ca891e9f7c613..85a6dc144ee1e 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -135,6 +135,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. 
--------------------------------------------------------------------------------------------------------- -Package config hash: c34f0720a080a434971a03e23168cd4748605b4f1975675b15badc4c6afbd358d5eafe4dd9c60c55bd4defca59db16fcef2f5ed5c70939e539651f287bfcca11 +Package config hash: 980100f98596f859c1d3d1b9db547275f0cc29031f32c80a0f23a811f2695902b1ce04a9d0f1d7e6ef5522745d728d457039cf67d48d4dcf25dcd9ab340d94dd --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg index 234bfa5ee81a3..b82c5f4b870b5 100644 --- a/dev/breeze/doc/images/output-commands.svg +++ b/dev/breeze/doc/images/output-commands.svg @@ -321,8 +321,8 @@ ╭─ Execution mode â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for   â”‚ │images.                                                â”‚ -│(>3.9< | 3.10 | 3.11 | 3.12)                           â”‚ -│[default: 3.9]                                         â”‚ +│(>3.10< | 3.11 | 3.12)                                 â”‚ +│[default: 3.10]                                        â”‚ │--integrationCore Integrations to enable when running (can be more  â”‚ │than one).                                             â”‚ │(all | all-testable | cassandra | celery | drill |     â”‚ diff --git a/dev/breeze/doc/images/output_ci-image_build.svg b/dev/breeze/doc/images/output_ci-image_build.svg index 5dfe723617826..7e8b2d9b429ec 100644 --- a/dev/breeze/doc/images/output_ci-image_build.svg +++ b/dev/breeze/doc/images/output_ci-image_build.svg @@ -328,8 +328,8 @@ ╭─ Basic usage â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest.│ │--upgrade-on-failure/--no-upgrade-on-failureWhen set, attempt to run upgrade to newer dependencies when       â”‚ │regular build fails. 
It is set to False by default on CI and True â”‚ @@ -354,7 +354,7 @@ │(INTEGER RANGE)                                                            â”‚ │[default: 4; 1<=x<=8]                                                      â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_ci-image_build.txt b/dev/breeze/doc/images/output_ci-image_build.txt index 57041fec628ef..f1d7c7a717a1a 100644 --- a/dev/breeze/doc/images/output_ci-image_build.txt +++ b/dev/breeze/doc/images/output_ci-image_build.txt @@ -1 +1 @@ -46406f11021929517710fc7cbdea72cc +c075a5d7ad1134165d16d1c938161fc9 diff --git a/dev/breeze/doc/images/output_ci-image_load.svg b/dev/breeze/doc/images/output_ci-image_load.svg index 027199b79750d..aa8e15d30fec9 100644 --- a/dev/breeze/doc/images/output_ci-image_load.svg +++ b/dev/breeze/doc/images/output_ci-image_load.svg @@ -171,8 +171,8 @@ ╭─ Load image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--platformPlatform for Airflow image.                               â”‚ │(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64)│ │--image-fileOptional file name to load the image from - name must follow the                 â”‚ diff --git a/dev/breeze/doc/images/output_ci-image_load.txt b/dev/breeze/doc/images/output_ci-image_load.txt index bbab37d71bf6a..d7d932a7463cb 100644 --- a/dev/breeze/doc/images/output_ci-image_load.txt +++ b/dev/breeze/doc/images/output_ci-image_load.txt @@ -1 +1 @@ -4cd3045ad6a0a6514cac07e3a9228a8f +d86e2e217fe99bd74580f751dc4c51bd diff --git a/dev/breeze/doc/images/output_ci-image_pull.svg b/dev/breeze/doc/images/output_ci-image_pull.svg index 2ca6acbb8cee1..b6f6da58aa94c 100644 --- a/dev/breeze/doc/images/output_ci-image_pull.svg +++ b/dev/breeze/doc/images/output_ci-image_pull.svg @@ -153,8 +153,8 @@ Pull and optionally verify CI images - possibly in parallel for all Python versions. 
╭─ Pull image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--verifyVerify image.│ │--wait-for-imageWait until image is available.│ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ @@ -164,7 +164,7 @@ │(INTEGER RANGE)                                                            â”‚ │[default: 4; 1<=x<=8]                                                      â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ │--debug-resourcesWhether to show resource information while running in parallel.│ │--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ diff --git a/dev/breeze/doc/images/output_ci-image_pull.txt b/dev/breeze/doc/images/output_ci-image_pull.txt index 7d10e3471f4bd..0de3a5b933769 100644 --- a/dev/breeze/doc/images/output_ci-image_pull.txt +++ b/dev/breeze/doc/images/output_ci-image_pull.txt @@ -1 +1 @@ -b8ed28c0a2a42d536bdb1716954a9a4e +c29b9a5e5bf4b503f4e0310006414597 diff --git a/dev/breeze/doc/images/output_ci-image_save.svg b/dev/breeze/doc/images/output_ci-image_save.svg index ac4c5782f7e61..e803d2b341142 100644 --- a/dev/breeze/doc/images/output_ci-image_save.svg +++ b/dev/breeze/doc/images/output_ci-image_save.svg @@ -117,8 +117,8 @@ Save CI image to a file. 
╭─ Save image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64)│ │--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│ │--image-fileOptional file to save the image to.(FILE)│ diff --git a/dev/breeze/doc/images/output_ci-image_save.txt b/dev/breeze/doc/images/output_ci-image_save.txt index bbdba6b5ec21e..5e457d7c315e1 100644 --- a/dev/breeze/doc/images/output_ci-image_save.txt +++ b/dev/breeze/doc/images/output_ci-image_save.txt @@ -1 +1 @@ -1ce181fbc8f99c54a5cd7ecc686d5277 +807d082ba3edd987a78c50608eb280c3 diff --git a/dev/breeze/doc/images/output_ci-image_verify.svg b/dev/breeze/doc/images/output_ci-image_verify.svg index d946e6d967a0b..ae6639d9a7167 100644 --- a/dev/breeze/doc/images/output_ci-image_verify.svg +++ b/dev/breeze/doc/images/output_ci-image_verify.svg @@ -154,8 +154,8 @@ ╭─ Verify image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--image-name-nName of the image to verify (overrides --python).(TEXT)│ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--pullPull image is missing before attempting to verify it.│ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Parallel running â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® @@ -164,7 +164,7 @@ │(INTEGER RANGE)                                                            â”‚ │[default: 4; 1<=x<=8]                                                      â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ │--debug-resourcesWhether to show resource information while running in parallel.│ │--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ diff --git a/dev/breeze/doc/images/output_ci-image_verify.txt 
b/dev/breeze/doc/images/output_ci-image_verify.txt index 3fec0520f20c4..e1300b3163f00 100644 --- a/dev/breeze/doc/images/output_ci-image_verify.txt +++ b/dev/breeze/doc/images/output_ci-image_verify.txt @@ -1 +1 @@ -71829e8a86a486823dcb10ca7e24ef83 +af0c91ebd8bb2ed858bdad86c43c579a diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg index a3549aad6b792..b810baa3752f0 100644 --- a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg +++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg @@ -154,8 +154,8 @@ ╭─ Build image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--rebuild-base-imageRebuilds base Airflow image before building K8S image.│ │--copy-local-sources/--no-copy-local-sourcesCopy local sources to the image.[default: copy-local-sources]│ │--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.│ @@ -167,7 +167,7 @@ │(INTEGER RANGE)                                                            â”‚ │[default: 4; 1<=x<=8]                                                      â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ │--debug-resourcesWhether to show resource information while running in parallel.│ │--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.txt b/dev/breeze/doc/images/output_k8s_build-k8s-image.txt index 07806232d4c44..67f0160b50209 100644 --- a/dev/breeze/doc/images/output_k8s_build-k8s-image.txt +++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.txt @@ -1 +1 @@ -d611c1e017fc83a7251abf72d5ee5ab7 +40442787a9fafb301236e1821099092a diff --git a/dev/breeze/doc/images/output_k8s_configure-cluster.svg b/dev/breeze/doc/images/output_k8s_configure-cluster.svg index db877992f5cf0..e35f2713ef075 100644 --- a/dev/breeze/doc/images/output_k8s_configure-cluster.svg +++ b/dev/breeze/doc/images/output_k8s_configure-cluster.svg @@ -157,8 +157,8 @@ parallel). 
╭─ Configure cluster flags ──────────────────────────────────────────────────────────────────────────────────────────╮ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              │ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             │ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            │ │[default: v1.30.10]                                   │ @@ -170,7 +170,7 @@ │(INTEGER RANGE)                                                                        │ │[default: 2; 1<=x<=4]                                                                  │ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 │ +│[default: 3.10 3.11 3.12]                                                     │ │--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ │[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   │ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ diff --git a/dev/breeze/doc/images/output_k8s_configure-cluster.txt b/dev/breeze/doc/images/output_k8s_configure-cluster.txt index 107af5a24c19d..7ecd42d760681 100644 --- a/dev/breeze/doc/images/output_k8s_configure-cluster.txt +++ b/dev/breeze/doc/images/output_k8s_configure-cluster.txt @@ -1 +1 @@ -0046d0e84f47461bd3f0fad0011c09c4 +6e93a011facfa3eef2cf3daccdbac9bd diff --git a/dev/breeze/doc/images/output_k8s_create-cluster.svg b/dev/breeze/doc/images/output_k8s_create-cluster.svg index 0a2a1cdb8b2fd..4e825353c2e09 100644 --- a/dev/breeze/doc/images/output_k8s_create-cluster.svg +++ b/dev/breeze/doc/images/output_k8s_create-cluster.svg @@ -1,4 +1,4 @@ - + Create a KinD Cluster for Python and Kubernetes version specified (optionally create all clusters in parallel).
╭─ K8S cluster creation flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ -│--kubernetes-versionKubernetes version used to create the KinD cluster of.│ -│(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ -│[default: v1.30.10]                                   â”‚ -│--force-recreate-clusterForce recreation of the cluster even if it is already created.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ -│--parallelismMaximum number of processes to use while running the operation in parallel for cluster â”‚ -│operations.                                                                            â”‚ -│(INTEGER RANGE)                                                                        â”‚ -│[default: 2; 1<=x<=4]                                                                  â”‚ -│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ -│--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ -│[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   â”‚ -│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ -│--debug-resourcesWhether to show resource information while running in parallel.│ -│--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--verbose-vPrint verbose information about performed steps.│ -│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ -│--help-hShow this message and exit.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ +│--kubernetes-versionKubernetes version used to create the KinD cluster of.│ +│(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ +│[default: v1.30.10]                                   â”‚ +│--force-recreate-clusterForce recreation of the cluster even if it is already created.│ 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ +│--parallelismMaximum number of processes to use while running the operation in parallel for cluster â”‚ +│operations.                                                                            â”‚ +│(INTEGER RANGE)                                                                        â”‚ +│[default: 2; 1<=x<=4]                                                                  â”‚ +│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ +│[default: 3.10 3.11 3.12]                                                     â”‚ +│--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ +│[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   â”‚ +│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ +│--debug-resourcesWhether to show resource information while running in parallel.│ +│--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--verbose-vPrint verbose information about performed steps.│ +│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ +│--help-hShow this message and exit.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_k8s_create-cluster.txt b/dev/breeze/doc/images/output_k8s_create-cluster.txt index d2f2860f3fd7b..8d6330198c8b5 100644 --- a/dev/breeze/doc/images/output_k8s_create-cluster.txt +++ b/dev/breeze/doc/images/output_k8s_create-cluster.txt @@ -1 +1 @@ -c1dd4f0cd63b22815f6539166489e920 +10b1e7982d783b404e854b6bccec5787 diff --git a/dev/breeze/doc/images/output_k8s_delete-cluster.svg b/dev/breeze/doc/images/output_k8s_delete-cluster.svg index 702c07b1c078e..0dbc5d38b1aea 100644 --- a/dev/breeze/doc/images/output_k8s_delete-cluster.svg +++ b/dev/breeze/doc/images/output_k8s_delete-cluster.svg @@ -114,8 +114,8 @@ Delete the current KinD Cluster (optionally all clusters). 
╭─ K8S cluster delete flags ─────────────────────────────────────────────────────────────────────────────────────────╮ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              │ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             │ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            │ │[default: v1.30.10]                                   │ diff --git a/dev/breeze/doc/images/output_k8s_delete-cluster.txt b/dev/breeze/doc/images/output_k8s_delete-cluster.txt index 98a79ed1bd48d..169719432d2ff 100644 --- a/dev/breeze/doc/images/output_k8s_delete-cluster.txt +++ b/dev/breeze/doc/images/output_k8s_delete-cluster.txt @@ -1 +1 @@ -d566a2a557cff3850c7ce81368bd08a2 +ad0eea9be87ef3d5451698aab8a25358 diff --git a/dev/breeze/doc/images/output_k8s_deploy-airflow.svg b/dev/breeze/doc/images/output_k8s_deploy-airflow.svg index 40ff39b7a420d..e517993e03a1c 100644 --- a/dev/breeze/doc/images/output_k8s_deploy-airflow.svg +++ b/dev/breeze/doc/images/output_k8s_deploy-airflow.svg @@ -1,4 +1,4 @@ - + Deploy airflow image to the current KinD cluster (or all clusters). ╭─ Airflow deploy flags ─────────────────────────────────────────────────────────────────────────────────────────────╮ -│--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                │ -│[default: 3.9]                                              │ -│--kubernetes-versionKubernetes version used to create the KinD cluster of.│ -│(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            │ -│[default: v1.30.10]                                   │ -│--executorExecutor to use for a kubernetes cluster.                                               
â”‚ -│(>LocalExecutor< | KubernetesExecutor | CeleryExecutor | CeleryKubernetesExecutor |     â”‚ -│EdgeExecutor)                                                                           â”‚ -│[default: LocalExecutor]                                                                â”‚ -│--upgradeUpgrade Helm Chart rather than installing it.│ -│--wait-time-in-secondsWait for Airflow api-server for specified number of seconds.(INTEGER RANGE)│ -│--use-standard-namingUse standard naming.│ -│--multi-namespace-modeUse multi namespace mode.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ -│--parallelismMaximum number of processes to use while running the operation in parallel for cluster â”‚ -│operations.                                                                            â”‚ -│(INTEGER RANGE)                                                                        â”‚ -│[default: 2; 1<=x<=4]                                                                  â”‚ -│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ -│--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ -│[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   â”‚ -│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ -│--debug-resourcesWhether to show resource information while running in parallel.│ -│--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--verbose-vPrint verbose information about performed steps.│ -│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ -│--help-hShow this message and exit.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ +│--kubernetes-versionKubernetes version used to create the KinD cluster of.│ +│(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ +│[default: v1.30.10]                                   â”‚ +│--executorExecutor to use for a kubernetes cluster.                                               
â”‚ +│(>LocalExecutor< | KubernetesExecutor | CeleryExecutor | CeleryKubernetesExecutor |     â”‚ +│EdgeExecutor)                                                                           â”‚ +│[default: LocalExecutor]                                                                â”‚ +│--upgradeUpgrade Helm Chart rather than installing it.│ +│--wait-time-in-secondsWait for Airflow api-server for specified number of seconds.(INTEGER RANGE)│ +│--use-standard-namingUse standard naming.│ +│--multi-namespace-modeUse multi namespace mode.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ +│--parallelismMaximum number of processes to use while running the operation in parallel for cluster â”‚ +│operations.                                                                            â”‚ +│(INTEGER RANGE)                                                                        â”‚ +│[default: 2; 1<=x<=4]                                                                  â”‚ +│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ +│[default: 3.10 3.11 3.12]                                                     â”‚ +│--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ +│[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   â”‚ +│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ +│--debug-resourcesWhether to show resource information while running in parallel.│ +│--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--verbose-vPrint verbose information about performed steps.│ +│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ +│--help-hShow this message and exit.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_k8s_deploy-airflow.txt b/dev/breeze/doc/images/output_k8s_deploy-airflow.txt index af7477cff44d8..9b82b31144831 100644 --- a/dev/breeze/doc/images/output_k8s_deploy-airflow.txt +++ b/dev/breeze/doc/images/output_k8s_deploy-airflow.txt @@ -1 +1 @@ -6658e1cb44fcd486de6e9d7170ea92ac +c31bffdf3bba32ed1278cd59f20c2faf diff --git a/dev/breeze/doc/images/output_k8s_k9s.svg b/dev/breeze/doc/images/output_k8s_k9s.svg index e3aab67c929a3..c3b3058d3fbc9 100644 --- a/dev/breeze/doc/images/output_k8s_k9s.svg +++ b/dev/breeze/doc/images/output_k8s_k9s.svg @@ -119,8 +119,8 @@ ╭─ K8S k9s flags 
───────────────────────────────────────────────────────────────────────────────────────────────────╮ │--use-dockerUse Docker to start k8s executor (otherwise k9s from PATH is used and only run with docker│ │if not found on PATH).                                                                    │ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              │ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             │ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            │ │[default: v1.30.10]                                   │ diff --git a/dev/breeze/doc/images/output_k8s_k9s.txt b/dev/breeze/doc/images/output_k8s_k9s.txt index 44bb857e587c4..7191c67d24e31 100644 --- a/dev/breeze/doc/images/output_k8s_k9s.txt +++ b/dev/breeze/doc/images/output_k8s_k9s.txt @@ -1 +1 @@ -ec4d6d8cdd4b6030a566479af0d5ced7 +efedbc9d3a96d716fa0f560e8dc5ea7e diff --git a/dev/breeze/doc/images/output_k8s_logs.svg b/dev/breeze/doc/images/output_k8s_logs.svg index 5fa88dec6a4d9..6e1399a36dc3d 100644 --- a/dev/breeze/doc/images/output_k8s_logs.svg +++ b/dev/breeze/doc/images/output_k8s_logs.svg @@ -115,8 +115,8 @@ Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters). ╭─ K8S logs flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              │ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             │ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            │ │[default: v1.30.10]                                   │ diff --git a/dev/breeze/doc/images/output_k8s_logs.txt b/dev/breeze/doc/images/output_k8s_logs.txt index bbd6507e52c2b..46cec2dd8bb7e 100644 --- a/dev/breeze/doc/images/output_k8s_logs.txt +++ b/dev/breeze/doc/images/output_k8s_logs.txt @@ -1 +1 @@ -7cd5d42dfc1cb852425543b39683809f +cf00d923f26a0d331dd7fe7251c56f37 diff --git a/dev/breeze/doc/images/output_k8s_run-complete-tests.svg b/dev/breeze/doc/images/output_k8s_run-complete-tests.svg index 6591630827a99..80014a73460c5 100644 --- a/dev/breeze/doc/images/output_k8s_run-complete-tests.svg +++ b/dev/breeze/doc/images/output_k8s_run-complete-tests.svg @@ -228,8 +228,8 @@ │[default: use-uv]                                          │ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ K8S tests flags
──────────────────────────────────────────────────────────────────────────────────────────────────╮ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              │ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             │ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            │ │[default: v1.30.10]                                   │ @@ -246,7 +246,7 @@ │(INTEGER RANGE)                                                                        │ │[default: 2; 1<=x<=4]                                                                  │ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 │ +│[default: 3.10 3.11 3.12]                                                     │ │--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ │[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   │ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ diff --git a/dev/breeze/doc/images/output_k8s_run-complete-tests.txt b/dev/breeze/doc/images/output_k8s_run-complete-tests.txt index efd06d4a5eaa1..128524bb500e8 100644 --- a/dev/breeze/doc/images/output_k8s_run-complete-tests.txt +++ b/dev/breeze/doc/images/output_k8s_run-complete-tests.txt @@ -1 +1 @@ -dc81fd3a8134ec0fd3c48a700ac15d4d +90311a31704a88dbeadc5955a4b26452 diff --git a/dev/breeze/doc/images/output_k8s_shell.svg b/dev/breeze/doc/images/output_k8s_shell.svg index fd7ecdf9e7743..84449a4aaf6df 100644 --- a/dev/breeze/doc/images/output_k8s_shell.svg +++ b/dev/breeze/doc/images/output_k8s_shell.svg @@ -126,8 +126,8 @@ Run shell environment for the current KinD cluster.
╭─ K8S shell flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ │[default: v1.30.10]                                   â”‚ diff --git a/dev/breeze/doc/images/output_k8s_shell.txt b/dev/breeze/doc/images/output_k8s_shell.txt index 56289998e5790..16014bad6d105 100644 --- a/dev/breeze/doc/images/output_k8s_shell.txt +++ b/dev/breeze/doc/images/output_k8s_shell.txt @@ -1 +1 @@ -d6b45371ce63aaf4d081fc7b02315c02 +b364a237eaa04d37db3b51620566ac85 diff --git a/dev/breeze/doc/images/output_k8s_status.svg b/dev/breeze/doc/images/output_k8s_status.svg index 6833c5b44db74..5d4f080a5ebe9 100644 --- a/dev/breeze/doc/images/output_k8s_status.svg +++ b/dev/breeze/doc/images/output_k8s_status.svg @@ -1,4 +1,4 @@ - + Check status of the current cluster and airflow deployed to it (optionally all clusters). ╭─ K8S cluster status flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ -│--kubernetes-versionKubernetes version used to create the KinD cluster of.│ -│(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ -│[default: v1.30.10]                                   â”‚ -│--wait-time-in-secondsWait for Airflow api-server for specified number of seconds.(INTEGER RANGE)│ -│--allApply it to all created clusters│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--verbose-vPrint verbose information about performed steps.│ -│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ -│--help-hShow this message and exit.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ +│--kubernetes-versionKubernetes version used to create the KinD cluster of.│ +│(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ +│[default: v1.30.10]                                   â”‚ +│--wait-time-in-secondsWait for Airflow api-server for specified number 
of seconds.(INTEGER RANGE)│ +│--allApply it to all created clusters│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--verbose-vPrint verbose information about performed steps.│ +│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ +│--help-hShow this message and exit.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_k8s_status.txt b/dev/breeze/doc/images/output_k8s_status.txt index 0e32e381f9731..e9c655c06abeb 100644 --- a/dev/breeze/doc/images/output_k8s_status.txt +++ b/dev/breeze/doc/images/output_k8s_status.txt @@ -1 +1 @@ -d2818a9846afffdbfd2f20e5c916181d +45ae1dbd57e2d8e7c512bf32f5ab4193 diff --git a/dev/breeze/doc/images/output_k8s_tests.svg b/dev/breeze/doc/images/output_k8s_tests.svg index bcdc44b0b952d..1e15dd87efd79 100644 --- a/dev/breeze/doc/images/output_k8s_tests.svg +++ b/dev/breeze/doc/images/output_k8s_tests.svg @@ -168,8 +168,8 @@ Run tests against the current KinD cluster (optionally for all clusters in parallel). ╭─ K8S tests flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ │[default: v1.30.10]                                   â”‚ @@ -186,7 +186,7 @@ │(INTEGER RANGE)                                                                        â”‚ │[default: 2; 1<=x<=4]                                                                  â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ │[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   â”‚ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ diff --git a/dev/breeze/doc/images/output_k8s_tests.txt b/dev/breeze/doc/images/output_k8s_tests.txt index 40ad8ddf69d0d..f5a8137e26795 100644 --- a/dev/breeze/doc/images/output_k8s_tests.txt +++ b/dev/breeze/doc/images/output_k8s_tests.txt @@ -1 +1 @@ -3cfcacd444fce25165d418357e061b97 +73297fbd99e02da431cb693f8bd69049 diff --git a/dev/breeze/doc/images/output_k8s_upload-k8s-image.svg b/dev/breeze/doc/images/output_k8s_upload-k8s-image.svg index c4678d0831191..5aff919e8c664 100644 --- 
a/dev/breeze/doc/images/output_k8s_upload-k8s-image.svg +++ b/dev/breeze/doc/images/output_k8s_upload-k8s-image.svg @@ -150,8 +150,8 @@ Upload k8s-ready airflow image to the KinD cluster (optionally to all clusters in parallel) ╭─ Upload image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--kubernetes-versionKubernetes version used to create the KinD cluster of.│ │(>v1.30.10< | v1.31.6 | v1.32.3 | v1.33.0)            â”‚ │[default: v1.30.10]                                   â”‚ @@ -162,7 +162,7 @@ │(INTEGER RANGE)                                                            â”‚ │[default: 4; 1<=x<=8]                                                      â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT)│ │[default: v1.30.10 v1.31.6 v1.32.3 v1.33.0]                   â”‚ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ diff --git a/dev/breeze/doc/images/output_k8s_upload-k8s-image.txt b/dev/breeze/doc/images/output_k8s_upload-k8s-image.txt index 2f1dc62ea085f..c50b134b778b4 100644 --- a/dev/breeze/doc/images/output_k8s_upload-k8s-image.txt +++ b/dev/breeze/doc/images/output_k8s_upload-k8s-image.txt @@ -1 +1 @@ -9eca0f3ec95e6f89e6c8925e664b2af1 +7b05f647b8a0009a1a18471678dbd14d diff --git a/dev/breeze/doc/images/output_prod-image_build.svg b/dev/breeze/doc/images/output_prod-image_build.svg index 4827d89a1fc6b..94e6a33e2c482 100644 --- a/dev/breeze/doc/images/output_prod-image_build.svg +++ b/dev/breeze/doc/images/output_prod-image_build.svg @@ -1,4 +1,4 @@ - + │option is ignored when building images.                                              â”‚ │(TEXT)                                                                               â”‚ │--install-airflow-version-VInstall version of Airflow from PyPI.(TEXT)│ -│--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ -│--version-suffixVersion suffix used for PyPI packages (a1, a2, b1, rc1, rc2, .dev0, .dev1, .post1,   â”‚ -│.post2 etc.). Note the `.` is need in `.dev0` and `.post`. 
Might be followed with    â”‚ -│+local_version                                                                       â”‚ -│(TEXT)                                                                               â”‚ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building images in parallel â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ -│--parallelismMaximum number of processes to use while running the operation in parallel.│ -│(INTEGER RANGE)                                                            â”‚ -│[default: 4; 1<=x<=8]                                                      â”‚ -│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ -│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ -│--debug-resourcesWhether to show resource information while running in parallel.│ -│--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced build options (for power users) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip`       â”‚ -│itself).                                                                          â”‚ -│(TEXT)                                                                            â”‚ -│--commit-shaCommit SHA that is used to build the images.(TEXT)│ -│--debian-versionDebian version used in Airflow image as base for building images.(bookworm)│ -│[default: bookworm]                                              â”‚ -│--python-imageIf specified this is the base python image used to build the image. Should be     â”‚ -│something like: python:VERSION-slim-bookworm.                                     â”‚ -│(TEXT)                                                                            â”‚ -│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: no-use-uv]│ -│--uv-http-timeoutTimeout for requests that UV makes (only used in case of UV builds).│ -│(INTEGER RANGE)                                                     â”‚ -│[default: 300; x>=1]                                                â”‚ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Selecting constraint location (for power users) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or local context file).(TEXT)│ -│--airflow-constraints-modeMode of constraints for Airflow for PROD image building.       
        â”‚ -│(constraints | constraints-no-providers | constraints-source-providers)│ -│[default: constraints]                                                 â”‚ -│--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT)│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Choosing dependencies and extras (for power users) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--airflow-extrasExtras to install by default.                                                   â”‚ -│(TEXT)                                                                          â”‚ -│[default:                                                                       â”‚ -│aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,common-messaging,dock…│ -│--additional-airflow-extrasAdditional extra package while installing Airflow in the image.(TEXT)│ -│--additional-python-depsAdditional python dependencies to use when building the images.(TEXT)│ -│--dev-apt-depsApt dev dependencies to use when building the images.(TEXT)│ -│--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT)│ -│--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT)│ -│--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT)│ -│--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT)│ -│--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT)│ -│--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT)│ -│--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT)│ -│--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT)│ -│--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT)│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced customization options (for specific customization needs) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow)│ -│[default: .]                          â”‚ -│--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT)│ -│--install-distributions-from-contextInstall distributions from local docker-context-files when building  â”‚ -│image. Implies --disable-airflow-repo-cache│ -│--install-mysql-client-typeWhich client to choose when installing.(mariadb | mysql)│ -│--cleanup-contextClean up docker context files before running build (cannot be used   â”‚ -│together with --install-distributions-from-context).                 â”‚ -│--use-constraints-for-context-distributionsUses constraints for context distributions installation - either from│ -│constraints store in docker-context-files or from github.            
â”‚ -│--disable-airflow-repo-cacheDisable cache from Airflow repository during building.│ -│--disable-mysql-client-installationDo not install MySQL client.│ -│--disable-mssql-client-installationDo not install MsSQl client.│ -│--disable-postgres-client-installationDo not install Postgres client.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--builderBuildx builder used to perform `docker buildx build` commands.(TEXT)│ -│[default: autodetect]                                         â”‚ -│--platformPlatform for Airflow image.                                                         â”‚ -│(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64 | linux/amd64,linux/arm64)│ -│--pushPush image after building it.│ -│--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the â”‚ -│image).                                                                                   â”‚ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Github authentication â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│ -│--github-tokenThe token used to authenticate to GitHub.(TEXT)│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│ -│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ -│--verbose-vPrint verbose information about performed steps.│ -│--help-hShow this message and exit.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ +│--version-suffixVersion suffix used for PyPI packages (a1, a2, b1, rc1, rc2, .dev0, .dev1, .post1,   â”‚ +│.post2 etc.). Note the `.` is need in `.dev0` and `.post`. 
Might be followed with    â”‚ +│+local_version                                                                       â”‚ +│(TEXT)                                                                               â”‚ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building images in parallel â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ +│--parallelismMaximum number of processes to use while running the operation in parallel.│ +│(INTEGER RANGE)                                                            â”‚ +│[default: 4; 1<=x<=8]                                                      â”‚ +│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ +│[default: 3.10 3.11 3.12]                                                     â”‚ +│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ +│--debug-resourcesWhether to show resource information while running in parallel.│ +│--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced build options (for power users) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip`       â”‚ +│itself).                                                                          â”‚ +│(TEXT)                                                                            â”‚ +│--commit-shaCommit SHA that is used to build the images.(TEXT)│ +│--debian-versionDebian version used in Airflow image as base for building images.(bookworm)│ +│[default: bookworm]                                              â”‚ +│--python-imageIf specified this is the base python image used to build the image. Should be     â”‚ +│something like: python:VERSION-slim-bookworm.                                     â”‚ +│(TEXT)                                                                            â”‚ +│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: no-use-uv]│ +│--uv-http-timeoutTimeout for requests that UV makes (only used in case of UV builds).│ +│(INTEGER RANGE)                                                     â”‚ +│[default: 300; x>=1]                                                â”‚ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Selecting constraint location (for power users) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or local context file).(TEXT)│ +│--airflow-constraints-modeMode of constraints for Airflow for PROD image building.       
        â”‚ +│(constraints | constraints-no-providers | constraints-source-providers)│ +│[default: constraints]                                                 â”‚ +│--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT)│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Choosing dependencies and extras (for power users) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--airflow-extrasExtras to install by default.                                                   â”‚ +│(TEXT)                                                                          â”‚ +│[default:                                                                       â”‚ +│aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,common-messaging,dock…│ +│--additional-airflow-extrasAdditional extra package while installing Airflow in the image.(TEXT)│ +│--additional-python-depsAdditional python dependencies to use when building the images.(TEXT)│ +│--dev-apt-depsApt dev dependencies to use when building the images.(TEXT)│ +│--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT)│ +│--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT)│ +│--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT)│ +│--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT)│ +│--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT)│ +│--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT)│ +│--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT)│ +│--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT)│ +│--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT)│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced customization options (for specific customization needs) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow)│ +│[default: .]                          â”‚ +│--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT)│ +│--install-distributions-from-contextInstall distributions from local docker-context-files when building  â”‚ +│image. Implies --disable-airflow-repo-cache│ +│--install-mysql-client-typeWhich client to choose when installing.(mariadb | mysql)│ +│--cleanup-contextClean up docker context files before running build (cannot be used   â”‚ +│together with --install-distributions-from-context).                 â”‚ +│--use-constraints-for-context-distributionsUses constraints for context distributions installation - either from│ +│constraints store in docker-context-files or from github.            
â”‚ +│--disable-airflow-repo-cacheDisable cache from Airflow repository during building.│ +│--disable-mysql-client-installationDo not install MySQL client.│ +│--disable-mssql-client-installationDo not install MsSQl client.│ +│--disable-postgres-client-installationDo not install Postgres client.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--builderBuildx builder used to perform `docker buildx build` commands.(TEXT)│ +│[default: autodetect]                                         â”‚ +│--platformPlatform for Airflow image.                                                         â”‚ +│(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64 | linux/amd64,linux/arm64)│ +│--pushPush image after building it.│ +│--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the â”‚ +│image).                                                                                   â”‚ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Github authentication â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│ +│--github-tokenThe token used to authenticate to GitHub.(TEXT)│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│ +│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ +│--verbose-vPrint verbose information about performed steps.│ +│--help-hShow this message and exit.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_prod-image_build.txt b/dev/breeze/doc/images/output_prod-image_build.txt index e187149f22ae6..58f9fe1ecf939 100644 --- a/dev/breeze/doc/images/output_prod-image_build.txt +++ b/dev/breeze/doc/images/output_prod-image_build.txt @@ -1 +1 @@ -bd7a879acaf67b0332acf3b60e234193 +f0627afea3a75fad4886afaa9a517fdf diff --git a/dev/breeze/doc/images/output_prod-image_load.svg b/dev/breeze/doc/images/output_prod-image_load.svg index 746b741f9cdec..d40cc1db81ba9 100644 --- a/dev/breeze/doc/images/output_prod-image_load.svg +++ b/dev/breeze/doc/images/output_prod-image_load.svg @@ -156,8 +156,8 @@ ╭─ Load image flags 
──────────────────────────────────────────────────────────────────────────────────────────────────────╮ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                │ -│[default: 3.9]                                              │ +│(>3.10< | 3.11 | 3.12)                                      │ +│[default: 3.10]                                             │ │--platformPlatform for Airflow image.                               │ │(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64)│ │--image-fileOptional file to save the image to.(FILE)│ diff --git a/dev/breeze/doc/images/output_prod-image_load.txt b/dev/breeze/doc/images/output_prod-image_load.txt index 44d94e2b0b108..e229179e97f35 100644 --- a/dev/breeze/doc/images/output_prod-image_load.txt +++ b/dev/breeze/doc/images/output_prod-image_load.txt @@ -1 +1 @@ -db73218af61a7c7092636e29846b9b5b +de986e6f731cef102f8f2f40dd2e8b01 diff --git a/dev/breeze/doc/images/output_prod-image_pull.svg b/dev/breeze/doc/images/output_prod-image_pull.svg index 23ff7aa9e41d9..ccddaeaf3a14e 100644 --- a/dev/breeze/doc/images/output_prod-image_pull.svg +++ b/dev/breeze/doc/images/output_prod-image_pull.svg @@ -153,8 +153,8 @@ Pull and optionally verify Production images - possibly in parallel for all Python versions. ╭─ Pull image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              │ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             │ │--verifyVerify image.│ │--wait-for-imageWait until image is available.│ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ @@ -164,7 +164,7 @@ │(INTEGER RANGE)                                                            │ │[default: 4; 1<=x<=8]                                                      │ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 │ +│[default: 3.10 3.11 3.12]                                                     │ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ │--debug-resourcesWhether to show resource information while running in parallel.│ │--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ diff --git a/dev/breeze/doc/images/output_prod-image_pull.txt b/dev/breeze/doc/images/output_prod-image_pull.txt index d6ac534091a67..6e0a79486fc8e 100644 --- a/dev/breeze/doc/images/output_prod-image_pull.txt +++ b/dev/breeze/doc/images/output_prod-image_pull.txt @@ -1 +1 @@ -beceddc6149385f810b4bb8ee798416c +a1c4d0b647670b696293bde6e97898bd diff --git a/dev/breeze/doc/images/output_prod-image_save.svg
b/dev/breeze/doc/images/output_prod-image_save.svg index d0c3231c38bcf..992d5ce22e9b0 100644 --- a/dev/breeze/doc/images/output_prod-image_save.svg +++ b/dev/breeze/doc/images/output_prod-image_save.svg @@ -117,8 +117,8 @@ Save PROD image to a file. ╭─ Save image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64)│ │--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│ │--image-fileOptional file to save the image to.(FILE)│ diff --git a/dev/breeze/doc/images/output_prod-image_save.txt b/dev/breeze/doc/images/output_prod-image_save.txt index 5edb777eef92c..b39bbfdb68c8a 100644 --- a/dev/breeze/doc/images/output_prod-image_save.txt +++ b/dev/breeze/doc/images/output_prod-image_save.txt @@ -1 +1 @@ -60db655a3dbcd7cafcfe68548c60fcb4 +b4b74446ac624e2114a03773996968fc diff --git a/dev/breeze/doc/images/output_prod-image_verify.svg b/dev/breeze/doc/images/output_prod-image_verify.svg index d5341f2bbab0d..0c4c68ad8ec87 100644 --- a/dev/breeze/doc/images/output_prod-image_verify.svg +++ b/dev/breeze/doc/images/output_prod-image_verify.svg @@ -160,8 +160,8 @@ ╭─ Verify image flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--image-name-nName of the image to verify (overrides --python).(TEXT)│ -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--slim-imageThe image to verify is slim and non-slim tests should be skipped.│ │--pullPull image is missing before attempting to verify it.│ │--manifest-fileRead digest of the image from the manifest file instead of using name and pulling it.(FILE)│ @@ -172,7 +172,7 @@ │(INTEGER RANGE)                                                            â”‚ │[default: 4; 1<=x<=8]                                                      â”‚ │--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ +│[default: 3.10 3.11 3.12]                                                     â”‚ │--skip-cleanupSkip cleanup of temporary files created during parallel run.│ │--debug-resourcesWhether to show resource information while running in parallel.│ │--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ diff --git a/dev/breeze/doc/images/output_prod-image_verify.txt 
b/dev/breeze/doc/images/output_prod-image_verify.txt index 909ac7db8e4aa..857a970c85745 100644 --- a/dev/breeze/doc/images/output_prod-image_verify.txt +++ b/dev/breeze/doc/images/output_prod-image_verify.txt @@ -1 +1 @@ -76afb62329b439b9b2a6000c0be68a04 +8fcf39b4153549e82cddcf3746126801 diff --git a/dev/breeze/doc/images/output_release-management_generate-constraints.svg b/dev/breeze/doc/images/output_release-management_generate-constraints.svg index 4cd4b73b12018..bc0ea443f1c77 100644 --- a/dev/breeze/doc/images/output_release-management_generate-constraints.svg +++ b/dev/breeze/doc/images/output_release-management_generate-constraints.svg @@ -1,4 +1,4 @@ - + │(constraints-source-providers | constraints | constraints-no-providers)│ │[default: constraints-source-providers]                                â”‚ │--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│ -│--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ -│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: use-uv]│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel running â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--debug-resourcesWhether to show resource information while running in parallel.│ -│--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE)│ -│[default: 4; 1<=x<=8]                                                      â”‚ -│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ -│[default: 3.9 3.10 3.11 3.12]                                                 â”‚ -│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ -│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--verbose-vPrint verbose information about performed steps.│ -│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ -│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│ -│--help-hShow this message and exit.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ +│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: use-uv]│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel running 
â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--debug-resourcesWhether to show resource information while running in parallel.│ +│--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE)│ +│[default: 4; 1<=x<=8]                                                      â”‚ +│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│ +│[default: 3.10 3.11 3.12]                                                     â”‚ +│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│ +│--skip-cleanupSkip cleanup of temporary files created during parallel run.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--verbose-vPrint verbose information about performed steps.│ +│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ +│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│ +│--help-hShow this message and exit.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management_generate-constraints.txt b/dev/breeze/doc/images/output_release-management_generate-constraints.txt index a9d57db03a898..b1402f55a33f4 100644 --- a/dev/breeze/doc/images/output_release-management_generate-constraints.txt +++ b/dev/breeze/doc/images/output_release-management_generate-constraints.txt @@ -1 +1 @@ -8c14b225d34fd1208e806e1264b1065d +7721551d951d9d1f1aa4a65fc40590fb diff --git a/dev/breeze/doc/images/output_release-management_install-provider-distributions.svg b/dev/breeze/doc/images/output_release-management_install-provider-distributions.svg index 700503f144096..f0b1669dc1af8 100644 --- a/dev/breeze/doc/images/output_release-management_install-provider-distributions.svg +++ b/dev/breeze/doc/images/output_release-management_install-provider-distributions.svg @@ -300,8 +300,8 @@ Installs provider distributiobs that can be found in dist. ╭─ Provider installation flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =       â”‚ │selected).                                                                                 
â”‚ │(selected | all | skip | remove | tests | providers-and-tests)                             â”‚ diff --git a/dev/breeze/doc/images/output_release-management_install-provider-distributions.txt b/dev/breeze/doc/images/output_release-management_install-provider-distributions.txt index 89c64b6e4d93f..7abc4df7f96ab 100644 --- a/dev/breeze/doc/images/output_release-management_install-provider-distributions.txt +++ b/dev/breeze/doc/images/output_release-management_install-provider-distributions.txt @@ -1 +1 @@ -c04ca4aa246f2fd444efed0308474fa1 +1ea2317652b1d9d4f58e25de06c1fca8 diff --git a/dev/breeze/doc/images/output_release-management_merge-prod-images.svg b/dev/breeze/doc/images/output_release-management_merge-prod-images.svg index 1ba1a022fbb67..50c44e8756f3b 100644 --- a/dev/breeze/doc/images/output_release-management_merge-prod-images.svg +++ b/dev/breeze/doc/images/output_release-management_merge-prod-images.svg @@ -149,7 +149,7 @@ │*--airflow-versionAirflow version to release (2.3.0, 2.3.0rc1 etc.)(TEXT)[required]│ │--python-pPython major/minor version used in Airflow image for images (if not specified - all python│ │versions are used).                                                                       â”‚ -│(3.9 | 3.10 | 3.11 | 3.12)                                                                â”‚ +│(3.10 | 3.11 | 3.12)                                                                      â”‚ │--slim-imagesWhether to prepare slim images instead of the regular ones.│ │*--metadata-folderFolder to write the build metadata to. When this option is specified the image is pushed  â”‚ │to registryonly by digests not by the tag because we are going to merge several images in â”‚ diff --git a/dev/breeze/doc/images/output_release-management_merge-prod-images.txt b/dev/breeze/doc/images/output_release-management_merge-prod-images.txt index 7341117037271..f830ba93dbd48 100644 --- a/dev/breeze/doc/images/output_release-management_merge-prod-images.txt +++ b/dev/breeze/doc/images/output_release-management_merge-prod-images.txt @@ -1 +1 @@ -3012509bbf65a7cf0d9d95f377ce58ec +a22a4eeefd00c33e8f68738d49d76c46 diff --git a/dev/breeze/doc/images/output_release-management_release-prod-images.svg b/dev/breeze/doc/images/output_release-management_release-prod-images.svg index 8584aa2ed5513..74327c40bffa5 100644 --- a/dev/breeze/doc/images/output_release-management_release-prod-images.svg +++ b/dev/breeze/doc/images/output_release-management_release-prod-images.svg @@ -170,7 +170,7 @@ │*--airflow-versionAirflow version to release (2.3.0, 2.3.0rc1 etc.)(TEXT)[required]│ │--python-pPython major/minor version used in Airflow image for images (if not specified - all python│ │versions are used).                                                                       â”‚ -│(3.9 | 3.10 | 3.11 | 3.12)                                                                â”‚ +│(3.10 | 3.11 | 3.12)                                                                      â”‚ │--platformPlatform to build images for (if not specified, multiplatform images will be built. 
â”‚ │(linux/amd64 | linux/arm64 | linux/x86_64 | linux/aarch64 | linux/amd64,linux/arm64)│ │[default: linux/amd64,linux/arm64]                                                  â”‚ diff --git a/dev/breeze/doc/images/output_release-management_release-prod-images.txt b/dev/breeze/doc/images/output_release-management_release-prod-images.txt index 52e095035a188..7547a6eed14d2 100644 --- a/dev/breeze/doc/images/output_release-management_release-prod-images.txt +++ b/dev/breeze/doc/images/output_release-management_release-prod-images.txt @@ -1 +1 @@ -fef18c3d93edfb7eefcf44162788f4f9 +8fc83e19c1176ffd508492fbe911b428 diff --git a/dev/breeze/doc/images/output_release-management_verify-provider-distributions.svg b/dev/breeze/doc/images/output_release-management_verify-provider-distributions.svg index abffb682a0826..138a709e161ef 100644 --- a/dev/breeze/doc/images/output_release-management_verify-provider-distributions.svg +++ b/dev/breeze/doc/images/output_release-management_verify-provider-distributions.svg @@ -273,8 +273,8 @@ Verifies if all provider code is following expectations for providers. ╭─ Provider verification flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =       â”‚ │selected).                                                                                 
â”‚ │(selected | all | skip | remove | tests | providers-and-tests)                             â”‚ diff --git a/dev/breeze/doc/images/output_release-management_verify-provider-distributions.txt b/dev/breeze/doc/images/output_release-management_verify-provider-distributions.txt index 889a731bf110b..1661d2e8a095e 100644 --- a/dev/breeze/doc/images/output_release-management_verify-provider-distributions.txt +++ b/dev/breeze/doc/images/output_release-management_verify-provider-distributions.txt @@ -1 +1 @@ -26b34f4a3b053d7114ffc7dee194c87b +fcc7b57ae5cc1d427981eadc3499f281 diff --git a/dev/breeze/doc/images/output_sbom_export-dependency-information.svg b/dev/breeze/doc/images/output_sbom_export-dependency-information.svg index fc7ff4373b6ec..ee603e09b45b7 100644 --- a/dev/breeze/doc/images/output_sbom_export-dependency-information.svg +++ b/dev/breeze/doc/images/output_sbom_export-dependency-information.svg @@ -160,8 +160,8 @@ ╭─ Export dependency information flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │*--airflow-version-AAirflow version to use for the command.(TEXT)[required]│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--include-open-psf-scorecard-sInclude statistics from the Open PSF Scorecard│ │--include-github-stats-GInclude statistics from GitHub│ │--include-actionsInclude Actions recommended for the project│ diff --git a/dev/breeze/doc/images/output_sbom_export-dependency-information.txt b/dev/breeze/doc/images/output_sbom_export-dependency-information.txt index 8be6d09b5019f..d5a5892cd30bc 100644 --- a/dev/breeze/doc/images/output_sbom_export-dependency-information.txt +++ b/dev/breeze/doc/images/output_sbom_export-dependency-information.txt @@ -1 +1 @@ -432576382063ad0f8e30e277713bbc10 +4d738ac97ad21c8dd62bb240022d8d81 diff --git a/dev/breeze/doc/images/output_setup_config.svg b/dev/breeze/doc/images/output_setup_config.svg index ae0d37872a119..4c99903272683 100644 --- a/dev/breeze/doc/images/output_setup_config.svg +++ b/dev/breeze/doc/images/output_setup_config.svg @@ -130,8 +130,8 @@ ╭─ Config flags â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--backend-bDatabase backend to use. If 'none' is chosen, Breeze will start with an invalid│ │database configuration, meaning there will be no database available, and any   â”‚ │attempts to connect to the Airflow database will fail.                         
â”‚ diff --git a/dev/breeze/doc/images/output_setup_config.txt b/dev/breeze/doc/images/output_setup_config.txt index 3f69ce1d49f86..6a52e60b0b458 100644 --- a/dev/breeze/doc/images/output_setup_config.txt +++ b/dev/breeze/doc/images/output_setup_config.txt @@ -1 +1 @@ -9509e3954e5aeb6a991fe5b302701d4d +ce0a944326c3418706b7aa90d730e186 diff --git a/dev/breeze/doc/images/output_shell.svg b/dev/breeze/doc/images/output_shell.svg index 2eebfe4d89fb7..34bf0f927e061 100644 --- a/dev/breeze/doc/images/output_shell.svg +++ b/dev/breeze/doc/images/output_shell.svg @@ -578,8 +578,8 @@ ╭─ Execution mode â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for   â”‚ │images.                                                â”‚ -│(>3.9< | 3.10 | 3.11 | 3.12)                           â”‚ -│[default: 3.9]                                         â”‚ +│(>3.10< | 3.11 | 3.12)                                 â”‚ +│[default: 3.10]                                        â”‚ │--integrationCore Integrations to enable when running (can be more  â”‚ │than one).                                             â”‚ │(all | all-testable | cassandra | celery | drill |     â”‚ diff --git a/dev/breeze/doc/images/output_shell.txt b/dev/breeze/doc/images/output_shell.txt index 8650401fe3735..ab08dd10a1636 100644 --- a/dev/breeze/doc/images/output_shell.txt +++ b/dev/breeze/doc/images/output_shell.txt @@ -1 +1 @@ -00e479003b0f0fd2e7e81d0703a4b829 +88a0320b81a12411637d9b2596394a3a diff --git a/dev/breeze/doc/images/output_start-airflow.svg b/dev/breeze/doc/images/output_start-airflow.svg index 5e5e599f53334..24ef4793309b7 100644 --- a/dev/breeze/doc/images/output_start-airflow.svg +++ b/dev/breeze/doc/images/output_start-airflow.svg @@ -1,4 +1,4 @@ - + ╭─ Execution mode â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for   â”‚ │images.                                                â”‚ -│(>3.9< | 3.10 | 3.11 | 3.12)                           â”‚ -│[default: 3.9]                                         â”‚ +│(>3.10< | 3.11 | 3.12)                                 â”‚ +│[default: 3.10]                                        â”‚ │--platformPlatform for Airflow image.                            
â”‚ │(linux/amd64 | linux/arm64 | linux/x86_64 |            â”‚ │linux/aarch64)                                         â”‚ @@ -531,57 +558,66 @@ │[default: selected]                                                                            â”‚ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Installing packages after entering shell â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or local context file).│ -│(TEXT)                                                                    â”‚ -│--airflow-constraints-modeMode of constraints for Airflow for CI image building.                 â”‚ -│(constraints-source-providers | constraints | constraints-no-providers)│ -│[default: constraints-source-providers]                                â”‚ -│--airflow-constraints-referenceConstraint reference to use for airflow installation (used in calculated       â”‚ -│constraints URL).                                                              â”‚ -│(TEXT)                                                                         â”‚ -│--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT)│ -│[default: ""]                                               â”‚ -│--clean-airflow-installationClean the airflow installation before installing version specified by          â”‚ -│--use-airflow-version.                                                         â”‚ -│--install-selected-providersComma-separated list of providers selected to be installed (implies            â”‚ -│--use-distributions-from-dist).                                                â”‚ -│(TEXT)                                                                         â”‚ -│--distribution-formatFormat of packages that should be installed from dist.(wheel | sdist)│ -│[default: wheel]                                      â”‚ -│--providers-constraints-locationLocation of providers constraints to use (remote URL or local context file).│ -│(TEXT)                                                                      â”‚ -│--providers-constraints-modeMode of constraints for Providers for CI image building.               â”‚ -│(constraints-source-providers | constraints | constraints-no-providers)│ -│[default: constraints-source-providers]                                â”‚ -│--providers-constraints-referenceConstraint reference to use for providers installation (used in calculated     â”‚ -│constraints URL). Can be 'default' in which case the default                   â”‚ -│constraints-reference is used.                                                 â”‚ -│(TEXT)                                                                         â”‚ -│--providers-skip-constraintsDo not use constraints when installing providers.│ -│--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be version (to â”‚ -│install from PyPI), `none`, `wheel`, or `sdist` to install from `dist` folder, â”‚ -│or VCS URL to install from (https://pip.pypa.io/en/stable/topics/vcs-support/).│ -│Implies --mount-sources `remove`.                                              
â”‚ -│(none | wheel | sdist | <airflow_version>)                                     â”‚ -│--allow-pre-releasesAllow pre-releases of Airflow, task-sdk and providers to be installed. Set to  â”‚ -│true automatically for pre-release --use-airflow-version)                      â”‚ -│--use-distributions-from-distInstall all found distributions (--distribution-format determines type) from   â”‚ -│'dist' folder when entering breeze.                                            â”‚ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Other options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--forward-credentials-fForward local credentials to container when running.│ -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--answer-aForce answer to questions.   â”‚ -│(y | n | q | yes | no | quit)│ -│--dry-run-DIf dry-run is set, commands are only printed, not      â”‚ -│executed.                                              â”‚ -│--install-airflow-with-constraints/--no-install-airflowInstall airflow in a separate step, with constraints   â”‚ -│-with-constraintsdetermined from package or airflow version.            â”‚ -│[default: install-airflow-with-constraints]            â”‚ -│--verbose-vPrint verbose information about performed steps.│ -│--help-hShow this message and exit.│ +│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or   â”‚ +│local context file).                                    â”‚ +│(TEXT)                                                  â”‚ +│--airflow-constraints-modeMode of constraints for Airflow for CI image building.  â”‚ +│(constraints-source-providers | constraints |           â”‚ +│constraints-no-providers)                               â”‚ +│[default: constraints-source-providers]                 â”‚ +│--airflow-constraints-referenceConstraint reference to use for airflow installation    â”‚ +│(used in calculated constraints URL).                   â”‚ +│(TEXT)                                                  â”‚ +│--airflow-extrasAirflow extras to install when --use-airflow-version is â”‚ +│used                                                    â”‚ +│(TEXT)                                                  â”‚ +│[default: ""]                                           â”‚ +│--clean-airflow-installationClean the airflow installation before installing version│ +│specified by --use-airflow-version.                     â”‚ +│--install-airflow-with-constraints/--no-install-airflow-Install airflow in a separate step, with constraints    â”‚ +│with-constraintsdetermined from package or airflow version.             â”‚ +│[default: install-airflow-with-constraints]             â”‚ +│--install-selected-providersComma-separated list of providers selected to be        â”‚ +│installed (implies --use-distributions-from-dist).      
â”‚ +│(TEXT)                                                  â”‚ +│--distribution-formatFormat of packages that should be installed from dist.│ +│(wheel | sdist)                                       â”‚ +│[default: wheel]                                      â”‚ +│--providers-constraints-locationLocation of providers constraints to use (remote URL or â”‚ +│local context file).                                    â”‚ +│(TEXT)                                                  â”‚ +│--providers-constraints-modeMode of constraints for Providers for CI image building.│ +│(constraints-source-providers | constraints |           â”‚ +│constraints-no-providers)                               â”‚ +│[default: constraints-source-providers]                 â”‚ +│--providers-constraints-referenceConstraint reference to use for providers installation  â”‚ +│(used in calculated constraints URL). Can be 'default'  â”‚ +│in which case the default constraints-reference is used.│ +│(TEXT)                                                  â”‚ +│--providers-skip-constraintsDo not use constraints when installing providers.│ +│--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It  â”‚ +│can also be version (to install from PyPI), `none`,     â”‚ +│`wheel`, or `sdist` to install from `dist` folder, or   â”‚ +│VCS URL to install from                                 â”‚ +│(https://pip.pypa.io/en/stable/topics/vcs-support/).    â”‚ +│Implies --mount-sources `remove`.                       â”‚ +│(none | wheel | sdist | <airflow_version>)              â”‚ +│--allow-pre-releasesAllow pre-releases of Airflow, task-sdk and providers to│ +│be installed. Set to true automatically for pre-release â”‚ +│--use-airflow-version)                                  â”‚ +│--use-distributions-from-distInstall all found distributions (--distribution-format│ +│determines type) from 'dist' folder when entering       â”‚ +│breeze.                                                 
â”‚ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Other options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--forward-credentials-fForward local credentials to container when running.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® +│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│ +│--dry-run-DIf dry-run is set, commands are only printed, not executed.│ +│--verbose-vPrint verbose information about performed steps.│ +│--help-hShow this message and exit.│ +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_start-airflow.txt b/dev/breeze/doc/images/output_start-airflow.txt index bd5fbeafe8e04..a210aa26ef03c 100644 --- a/dev/breeze/doc/images/output_start-airflow.txt +++ b/dev/breeze/doc/images/output_start-airflow.txt @@ -1 +1 @@ -055a450825e9a8ff29477615629cffc2 +9610c4e7624dd08101801d61232762f6 diff --git a/dev/breeze/doc/images/output_testing_airflow-ctl-tests.svg b/dev/breeze/doc/images/output_testing_airflow-ctl-tests.svg index 67f5620e4ed19..ae2181346697d 100644 --- a/dev/breeze/doc/images/output_testing_airflow-ctl-tests.svg +++ b/dev/breeze/doc/images/output_testing_airflow-ctl-tests.svg @@ -108,8 +108,8 @@ Run airflow-ctl tests - all Airflow CTL tests are non-DB bound tests. 
╭─ Test environment â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® -│--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12)│ -│[default: 3.9]                                              â”‚ +│--python-pPython major/minor version used in Airflow image for images.(>3.10< | 3.11 | 3.12)│ +│[default: 3.10]                                             â”‚ │--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE)│ │[default: 4; 1<=x<=8]                                                      â”‚ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_testing_airflow-ctl-tests.txt b/dev/breeze/doc/images/output_testing_airflow-ctl-tests.txt index bfce27953dfbc..3d97b4b676ed0 100644 --- a/dev/breeze/doc/images/output_testing_airflow-ctl-tests.txt +++ b/dev/breeze/doc/images/output_testing_airflow-ctl-tests.txt @@ -1 +1 @@ -ded529045d0ce777d7d8664efc7856b4 +7950b438b01dc4d75116ef332c7f2812 diff --git a/dev/breeze/doc/images/output_testing_core-integration-tests.svg b/dev/breeze/doc/images/output_testing_core-integration-tests.svg index 6f8f1fa475ce5..7be29c9cae0d4 100644 --- a/dev/breeze/doc/images/output_testing_core-integration-tests.svg +++ b/dev/breeze/doc/images/output_testing_core-integration-tests.svg @@ -224,8 +224,8 @@ │[default: sqlite]                                                   â”‚ │--no-db-cleanupDo not clear the database before each test module│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--postgres-version-PVersion of Postgres used.(>13< | 14 | 15 | 16 | 17)[default: 13]│ │--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0]│ │--forward-credentials-fForward local credentials to container when running.│ diff --git a/dev/breeze/doc/images/output_testing_core-integration-tests.txt b/dev/breeze/doc/images/output_testing_core-integration-tests.txt index 51fba3c9cfc37..6cc94ab76b850 100644 --- a/dev/breeze/doc/images/output_testing_core-integration-tests.txt +++ b/dev/breeze/doc/images/output_testing_core-integration-tests.txt @@ -1 +1 @@ -6a754e606e87c992f855d23294af88b3 +f9a72bab5c6f0a3d4eabddeb547818fe diff --git a/dev/breeze/doc/images/output_testing_core-tests.svg b/dev/breeze/doc/images/output_testing_core-tests.svg index a31a8d9909fae..145a15f6cae1e 100644 --- a/dev/breeze/doc/images/output_testing_core-tests.svg +++ b/dev/breeze/doc/images/output_testing_core-tests.svg @@ -421,8 +421,8 @@ │[default: sqlite]                                                   â”‚ │--no-db-cleanupDo not clear the database before each test module│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                          
                   â”‚ │--postgres-version-PVersion of Postgres used.(>13< | 14 | 15 | 16 | 17)[default: 13]│ │--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0]│ │--forward-credentials-fForward local credentials to container when running.│ diff --git a/dev/breeze/doc/images/output_testing_core-tests.txt b/dev/breeze/doc/images/output_testing_core-tests.txt index 289f6b4062c06..a5c89681ead52 100644 --- a/dev/breeze/doc/images/output_testing_core-tests.txt +++ b/dev/breeze/doc/images/output_testing_core-tests.txt @@ -1 +1 @@ -eb0cd2087234798b1d3e9c95120d9a93 +ceb51359e3530c67379ae17cf219647d diff --git a/dev/breeze/doc/images/output_testing_docker-compose-tests.svg b/dev/breeze/doc/images/output_testing_docker-compose-tests.svg index 052f1532eb14d..b46d39d0b370d 100644 --- a/dev/breeze/doc/images/output_testing_docker-compose-tests.svg +++ b/dev/breeze/doc/images/output_testing_docker-compose-tests.svg @@ -122,8 +122,8 @@ ╭─ Docker-compose tests flag â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--image-name-nName of the image to verify (overrides --python).(TEXT)│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--skip-docker-compose-deletionSkip deletion of docker-compose instance after the test│ │--include-success-outputsWhether to include outputs of successful runs (not shown by default).│ │--github-repository-gGitHub repository used to pull, push run images.(TEXT)│ diff --git a/dev/breeze/doc/images/output_testing_docker-compose-tests.txt b/dev/breeze/doc/images/output_testing_docker-compose-tests.txt index 006c1079ead23..640b88b2b6c0b 100644 --- a/dev/breeze/doc/images/output_testing_docker-compose-tests.txt +++ b/dev/breeze/doc/images/output_testing_docker-compose-tests.txt @@ -1 +1 @@ -31ffebab06e334bd47c15deaf2160736 +51b98fe337251d125d8de538a71a21df diff --git a/dev/breeze/doc/images/output_testing_providers-integration-tests.svg b/dev/breeze/doc/images/output_testing_providers-integration-tests.svg index 71bce41db52bd..af73ffc750a72 100644 --- a/dev/breeze/doc/images/output_testing_providers-integration-tests.svg +++ b/dev/breeze/doc/images/output_testing_providers-integration-tests.svg @@ -227,8 +227,8 @@ │[default: sqlite]                                                   â”‚ │--no-db-cleanupDo not clear the database before each test module│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--postgres-version-PVersion of Postgres used.(>13< | 14 | 15 | 16 | 17)[default: 13]│ │--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0]│ │--forward-credentials-fForward local credentials to container when running.│ diff --git a/dev/breeze/doc/images/output_testing_providers-integration-tests.txt b/dev/breeze/doc/images/output_testing_providers-integration-tests.txt 
index 1b6995e16c3d1..6a4377016c653 100644 --- a/dev/breeze/doc/images/output_testing_providers-integration-tests.txt +++ b/dev/breeze/doc/images/output_testing_providers-integration-tests.txt @@ -1 +1 @@ -48aafaf7c9282ae70307e168ffefb0ab +f8cae5908b307cdc14d48857da8e058d diff --git a/dev/breeze/doc/images/output_testing_providers-tests.svg b/dev/breeze/doc/images/output_testing_providers-tests.svg index e1aec6f18a145..cc0225e07660e 100644 --- a/dev/breeze/doc/images/output_testing_providers-tests.svg +++ b/dev/breeze/doc/images/output_testing_providers-tests.svg @@ -452,8 +452,8 @@ │[default: sqlite]                                                   â”‚ │--no-db-cleanupDo not clear the database before each test module│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--postgres-version-PVersion of Postgres used.(>13< | 14 | 15 | 16 | 17)[default: 13]│ │--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0]│ │--forward-credentials-fForward local credentials to container when running.│ diff --git a/dev/breeze/doc/images/output_testing_providers-tests.txt b/dev/breeze/doc/images/output_testing_providers-tests.txt index 8a8b1581162bc..8c77d66e03ade 100644 --- a/dev/breeze/doc/images/output_testing_providers-tests.txt +++ b/dev/breeze/doc/images/output_testing_providers-tests.txt @@ -1 +1 @@ -3e66e638bf0d2539a9f6da0758e145a9 +40d4ee42e19c31200593c168157a2521 diff --git a/dev/breeze/doc/images/output_testing_python-api-client-tests.svg b/dev/breeze/doc/images/output_testing_python-api-client-tests.svg index 0babcf9754813..ee1f256149479 100644 --- a/dev/breeze/doc/images/output_testing_python-api-client-tests.svg +++ b/dev/breeze/doc/images/output_testing_python-api-client-tests.svg @@ -205,8 +205,8 @@ │[default: sqlite]                                                   â”‚ │--no-db-cleanupDo not clear the database before each test module│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--postgres-version-PVersion of Postgres used.(>13< | 14 | 15 | 16 | 17)[default: 13]│ │--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0]│ │--forward-credentials-fForward local credentials to container when running.│ diff --git a/dev/breeze/doc/images/output_testing_python-api-client-tests.txt b/dev/breeze/doc/images/output_testing_python-api-client-tests.txt index d357add35d963..feaff93e8dbfa 100644 --- a/dev/breeze/doc/images/output_testing_python-api-client-tests.txt +++ b/dev/breeze/doc/images/output_testing_python-api-client-tests.txt @@ -1 +1 @@ -bd5df7204b8255b938c2910ae46c578f +92211c9044658c3f7f3ca81af06f7c43 diff --git a/dev/breeze/doc/images/output_testing_system-tests.svg b/dev/breeze/doc/images/output_testing_system-tests.svg index 023c7f6f24596..14594a21cdb73 100644 --- a/dev/breeze/doc/images/output_testing_system-tests.svg +++ b/dev/breeze/doc/images/output_testing_system-tests.svg @@ -296,8 +296,8 @@ │[default: sqlite]                                                   â”‚ │--no-db-cleanupDo not clear 
the database before each test module│ │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--postgres-version-PVersion of Postgres used.(>13< | 14 | 15 | 16 | 17)[default: 13]│ │--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0]│ │--forward-credentials-fForward local credentials to container when running.│ diff --git a/dev/breeze/doc/images/output_testing_system-tests.txt b/dev/breeze/doc/images/output_testing_system-tests.txt index 269ab6c161ba3..68d7fffd36077 100644 --- a/dev/breeze/doc/images/output_testing_system-tests.txt +++ b/dev/breeze/doc/images/output_testing_system-tests.txt @@ -1 +1 @@ -493c89674a60b43795166876bb57f31b +96c2e355b805d8d2a17d04978f4c6cdc diff --git a/dev/breeze/doc/images/output_testing_task-sdk-tests.svg b/dev/breeze/doc/images/output_testing_task-sdk-tests.svg index 36bb1a3734669..9dbbff6b7cc4c 100644 --- a/dev/breeze/doc/images/output_testing_task-sdk-tests.svg +++ b/dev/breeze/doc/images/output_testing_task-sdk-tests.svg @@ -170,8 +170,8 @@ ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Test environment â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â”€â•® │--python-pPython major/minor version used in Airflow image for images.│ -│(>3.9< | 3.10 | 3.11 | 3.12)                                â”‚ -│[default: 3.9]                                              â”‚ +│(>3.10< | 3.11 | 3.12)                                      â”‚ +│[default: 3.10]                                             â”‚ │--forward-credentials-fForward local credentials to container when running.│ │--force-sa-warnings/--no-force-sa-warningsEnable `sqlalchemy.exc.MovedIn20Warning` during the tests runs.│ │[default: force-sa-warnings]                                   â”‚ diff --git a/dev/breeze/doc/images/output_testing_task-sdk-tests.txt b/dev/breeze/doc/images/output_testing_task-sdk-tests.txt index e6a7b3b14f7d7..3e66367f5db6d 100644 --- a/dev/breeze/doc/images/output_testing_task-sdk-tests.txt +++ b/dev/breeze/doc/images/output_testing_task-sdk-tests.txt @@ -1 +1 @@ -05f9a5f75222a48aa104060aecab4bf5 +a39f22cb9ce828c2793aad73ac80bd07 diff --git a/dev/breeze/pyproject.toml b/dev/breeze/pyproject.toml index 82acc5dd06a78..73114320872ec 100644 --- a/dev/breeze/pyproject.toml +++ b/dev/breeze/pyproject.toml @@ -41,7 +41,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] -requires-python = "~=3.9" +requires-python = "~=3.10" dependencies = [ "black>=23.11.0", diff --git a/dev/breeze/src/airflow_breeze/commands/minor_release_command.py b/dev/breeze/src/airflow_breeze/commands/minor_release_command.py index 7e7ffeb6e60bb..4dec224fef0a7 100644 --- a/dev/breeze/src/airflow_breeze/commands/minor_release_command.py +++ b/dev/breeze/src/airflow_breeze/commands/minor_release_command.py @@ -191,7 +191,7 @@ def create_minor_version_branch(version_branch): create_branch(version_branch) # Build ci image if confirm_action("Build latest 
breeze image?"): - run_command(["breeze", "ci-image", "build", "--python", "3.9"], check=True) + run_command(["breeze", "ci-image", "build", "--python", "3.10"], check=True) # Update default branches update_default_branch(version_branch) # Commit changes diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 603ec7a59ff82..9ab6bb4ce9fc6 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -38,7 +38,7 @@ from multiprocessing import Pool from pathlib import Path from subprocess import DEVNULL -from typing import IO, TYPE_CHECKING, Any, Literal, NamedTuple, Union +from typing import IO, TYPE_CHECKING, Any, Literal, NamedTuple import click from rich.progress import Progress @@ -3747,7 +3747,7 @@ def generate_issue_content( ): from github import Github, Issue, PullRequest, UnknownObjectException - PullRequestOrIssue = Union[PullRequest.PullRequest, Issue.Issue] + PullRequestOrIssue = PullRequest.PullRequest | Issue.Issue verbose = get_verbose() previous = previous_release diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index a81d7cae2e2f8..184c508f6e0f8 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -44,7 +44,7 @@ APACHE_AIRFLOW_GITHUB_REPOSITORY = "apache/airflow" # Checked before putting in build cache -ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.9", "3.10", "3.11", "3.12"] +ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.10", "3.11", "3.12"] DEFAULT_PYTHON_MAJOR_MINOR_VERSION = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0] ALLOWED_ARCHITECTURES = [Architecture.X86_64, Architecture.ARM] # Database Backends used when starting Breeze. The "none" value means that the configuration is invalid. @@ -384,7 +384,7 @@ def get_default_platform_machine() -> str: # All python versions include all past python versions available in previous branches # Even if we remove them from the main version. This is needed to make sure we can cherry-pick # changes from main to the previous branch. 
-ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.9", "3.10", "3.11", "3.12"] +ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.10", "3.11", "3.12"] CURRENT_PYTHON_MAJOR_MINOR_VERSIONS = ALL_PYTHON_MAJOR_MINOR_VERSIONS CURRENT_POSTGRES_VERSIONS = ["13", "14", "15", "16", "17"] DEFAULT_POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0] @@ -727,26 +727,7 @@ def generate_provider_dependencies_if_needed(): # END OF EXTRAS LIST UPDATED BY PRE COMMIT ] -PROVIDERS_COMPATIBILITY_TESTS_MATRIX: list[dict[str, str | list[str]]] = [ - { - "python-version": "3.9", - "airflow-version": "2.10.5", - "remove-providers": "cloudant common.messaging fab git keycloak", - "run-tests": "true", - }, - { - "python-version": "3.9", - "airflow-version": "2.11.0", - "remove-providers": "cloudant common.messaging fab git keycloak", - "run-tests": "true", - }, - { - "python-version": "3.9", - "airflow-version": "3.0.2", - "remove-providers": "cloudant", - "run-tests": "true", - }, -] +PROVIDERS_COMPATIBILITY_TESTS_MATRIX: list[dict[str, str | list[str]]] = [] # Number of slices for low dep tests NUMBER_OF_LOW_DEP_SLICES = 5 diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py index 95ca20283b4ee..fd20d27b36436 100644 --- a/dev/breeze/src/airflow_breeze/params/common_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py @@ -67,7 +67,7 @@ class CommonBuildParams: prepare_buildx_cache: bool = False python_image: str | None = None push: bool = False - python: str = "3.9" + python: str = "3.10" uv_http_timeout: int = DEFAULT_UV_HTTP_TIMEOUT dry_run: bool = False version_suffix: str | None = None diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py index 0dcde4c33c775..55c9b54a8f621 100644 --- a/dev/breeze/src/airflow_breeze/utils/image.py +++ b/dev/breeze/src/airflow_breeze/utils/image.py @@ -18,7 +18,8 @@ import subprocess import time -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow_breeze.global_constants import ( ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS, diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py index 08b63372f0d8c..e330795d24c4d 100644 --- a/dev/breeze/src/airflow_breeze/utils/packages.py +++ b/dev/breeze/src/airflow_breeze/utils/packages.py @@ -592,7 +592,7 @@ def get_min_airflow_version(provider_id: str) -> str: def get_python_requires(provider_id: str) -> str: - python_requires = "~=3.9" + python_requires = "~=3.10" provider_details = get_provider_details(provider_id=provider_id) for p in provider_details.excluded_python_versions: python_requires += f", !={p}" diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py index 1d6af2d174865..68dd0b30967a4 100644 --- a/dev/breeze/src/airflow_breeze/utils/parallel.py +++ b/dev/breeze/src/airflow_breeze/utils/parallel.py @@ -217,7 +217,7 @@ def bytes2human(n): def get_printable_value(key: str, value: Any) -> str: if key == "percent": return f"{value} %" - if isinstance(value, (int, float)): + if isinstance(value, int | float): return bytes2human(value) return str(value) diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index a9cef7c0dc1fd..be9f3c6efc82b 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -101,8 
+101,12 @@ def get_package_setup_metadata_hash() -> str: metadata = distribution("apache-airflow-breeze").metadata try: description = metadata.json["description"] # type: ignore[attr-defined] - except AttributeError: - description = metadata.as_string() + except (AttributeError, KeyError): + description = str(metadata["Description"]) if "Description" in metadata else "" + + if isinstance(description, list): + description = "\n".join(description) + for line in description.splitlines(keepends=False): if line.startswith(prefix): return line[len(prefix) :] diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py index a60b6cf681482..dffb56c003012 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py @@ -30,7 +30,8 @@ import sys from collections.abc import Mapping from pathlib import Path -from typing import Union +from subprocess import CalledProcessError, CompletedProcess +from typing import Any from rich.markup import escape @@ -48,7 +49,8 @@ ) from airflow_breeze.utils.shared_options import get_dry_run, get_verbose -RunCommandResult = Union[subprocess.CompletedProcess, subprocess.CalledProcessError] +# RunCommandResult = type[subprocess.CompletedProcess] | type[subprocess.CalledProcessError] +RunCommandResult = CompletedProcess[Any] | CalledProcessError OPTION_MATCHER = re.compile(r"^[A-Z_]*=.*$") diff --git a/dev/breeze/tests/test_cache.py b/dev/breeze/tests/test_cache.py index 2eb501189b7d3..7d97369e2d468 100644 --- a/dev/breeze/tests/test_cache.py +++ b/dev/breeze/tests/test_cache.py @@ -36,8 +36,10 @@ [ ("backend", "mysql", (True, ["sqlite", "mysql", "postgres", "none"]), None), ("backend", "xxx", (False, ["sqlite", "mysql", "postgres", "none"]), None), - ("python_major_minor_version", "3.9", (True, ["3.9", "3.10", "3.11", "3.12"]), None), - ("python_major_minor_version", "3.8", (False, ["3.9", "3.10", "3.11", "3.12"]), None), + ("python_major_minor_version", "3.10", (True, ["3.10", "3.11", "3.12"]), None), + ("python_major_minor_version", "3.9", (False, ["3.10", "3.11", "3.12"]), None), + ("python_major_minor_version", "3.8", (False, ["3.10", "3.11", "3.12"]), None), + ("python_major_minor_version", "3.7", (False, ["3.10", "3.11", "3.12"]), None), ("missing", "value", None, AttributeError), ], ) diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index cc3b5e4abc0df..1b052ce7ada62 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -16,7 +16,6 @@ # under the License. 
from __future__ import annotations -import itertools import json import re from typing import Any @@ -54,21 +53,15 @@ ALL_PYTHON_VERSIONS_AS_STRING = " ".join(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS) ALL_PYTHON_VERSIONS_AS_LIST = "[" + ", ".join([f"'{v}'" for v in ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS]) + "]" +PYTHON_K8S_COMBO_LENGTH = max(len(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS), len(ALLOWED_KUBERNETES_VERSIONS)) +PYTHON_VERSIONS_MAX = (ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS * 2)[:PYTHON_K8S_COMBO_LENGTH] +KUBERNETES_VERSIONS_MAX = (ALLOWED_KUBERNETES_VERSIONS * 2)[:PYTHON_K8S_COMBO_LENGTH] + ZIP_PYTHON_AND_KUBERNETES_VERSIONS_AS_STRING = " ".join( - [ - f"{t[0]}-{t[1]}" - for t in itertools.zip_longest(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS, ALLOWED_KUBERNETES_VERSIONS) - ] + [f"{t[0]}-{t[1]}" for t in zip(PYTHON_VERSIONS_MAX, KUBERNETES_VERSIONS_MAX)] ) ZIP_PYTHON_AND_KUBERNETES_VERSIONS_AS_LIST = ( - "[" - + ", ".join( - [ - f"'{t[0]}-{t[1]}'" - for t in itertools.zip_longest(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS, ALLOWED_KUBERNETES_VERSIONS) - ] - ) - + "]" + "[" + ", ".join([f"'{t[0]}-{t[1]}'" for t in zip(PYTHON_VERSIONS_MAX, KUBERNETES_VERSIONS_MAX)]) + "]" ) @@ -814,9 +807,9 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("generated/provider_dependencies.json",), { "selected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, - "python-versions": "['3.9', '3.10', '3.11', '3.12']", + "python-versions": "['3.10', '3.11', '3.12']", "python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "ci-image-build": "true", "prod-image-build": "true", @@ -841,9 +834,9 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("generated/provider_dependencies.json",), { "selected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, - "python-versions": "['3.9', '3.10', '3.11', '3.12']", + "python-versions": "['3.10', '3.11', '3.12']", "python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "ci-image-build": "true", "prod-image-build": "true", @@ -1345,7 +1338,10 @@ def test_excluded_providers(): ) assert_outputs_are_printed( { - "excluded-providers-as-string": json.dumps({DEFAULT_PYTHON_MAJOR_MINOR_VERSION: ["cloudant"]}), + # In case dict is empty we have no exclusions. 
+ # if you need to exclude version use syntax + # {DEFAULT_PYTHON_MAJOR_MINOR_VERSION: ["provider_name_package"]} + "excluded-providers-as-string": json.dumps({}), }, str(stderr), ) @@ -1448,11 +1444,11 @@ def test_full_test_needed_when_scripts_changes(files: tuple[str, ...], expected_ { "selected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, "all-versions": "true", - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "mysql-versions": "['8.0', '8.4']", "postgres-versions": "['13', '14', '15', '16', '17']", - "python-versions": "['3.9', '3.10', '3.11', '3.12']", + "python-versions": "['3.10', '3.11', '3.12']", "python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "kubernetes-versions": ALL_KUBERNETES_VERSIONS_AS_LIST, "kubernetes-versions-list-as-string": ALL_KUBERNETES_VERSIONS_AS_STRING, @@ -1827,7 +1823,7 @@ def test_expected_output_pull_request_v2_7( "main", { "selected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "ci-image-build": "true", "prod-image-build": "true", @@ -1848,7 +1844,7 @@ def test_expected_output_pull_request_v2_7( (), "v2-3-stable", { - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "ci-image-build": "true", "prod-image-build": "true", @@ -1871,7 +1867,7 @@ def test_expected_output_pull_request_v2_7( "main", { "selected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "ci-image-build": "true", "prod-image-build": "true", @@ -2155,7 +2151,7 @@ def test_files_provided_trigger_full_build_for_any_event_type(github_event): ) assert_outputs_are_printed( { - "all-python-versions": "['3.9', '3.10', '3.11', '3.12']", + "all-python-versions": "['3.10', '3.11', '3.12']", "all-python-versions-list-as-string": ALL_PYTHON_VERSIONS_AS_STRING, "ci-image-build": "true", "prod-image-build": "true", diff --git a/dev/breeze/tests/test_shell_params.py b/dev/breeze/tests/test_shell_params.py index dbcd085ae56d1..917e124972dc2 100644 --- a/dev/breeze/tests/test_shell_params.py +++ b/dev/breeze/tests/test_shell_params.py @@ -43,30 +43,30 @@ ), pytest.param( {}, - {"python": "3.9"}, + {"python": "3.10"}, { - "AIRFLOW_CI_IMAGE": f"ghcr.io/apache/airflow/{AIRFLOW_BRANCH}/ci/python3.9", - "PYTHON_MAJOR_MINOR_VERSION": "3.9", + "AIRFLOW_CI_IMAGE": f"ghcr.io/apache/airflow/{AIRFLOW_BRANCH}/ci/python3.10", + "PYTHON_MAJOR_MINOR_VERSION": "3.10", }, - id="python3.9", + id="python3.10", ), pytest.param( {}, - {"airflow_branch": "v2-7-test"}, + {"airflow_branch": "v3-0-test"}, { - "DEFAULT_BRANCH": "v2-7-test", - "AIRFLOW_CI_IMAGE": "ghcr.io/apache/airflow/v2-7-test/ci/python3.9", - "PYTHON_MAJOR_MINOR_VERSION": "3.9", + "DEFAULT_BRANCH": "v3-0-test", + "AIRFLOW_CI_IMAGE": "ghcr.io/apache/airflow/v3-0-test/ci/python3.10", + "PYTHON_MAJOR_MINOR_VERSION": "3.10", }, id="With release branch", ), pytest.param( - {"DEFAULT_BRANCH": "v2-4-test"}, + {"DEFAULT_BRANCH": "v3-0-test"}, {}, { "DEFAULT_BRANCH": AIRFLOW_BRANCH, # DEFAULT_BRANCH is overridden from sources - 
"AIRFLOW_CI_IMAGE": f"ghcr.io/apache/airflow/{AIRFLOW_BRANCH}/ci/python3.9", - "PYTHON_MAJOR_MINOR_VERSION": "3.9", + "AIRFLOW_CI_IMAGE": f"ghcr.io/apache/airflow/{AIRFLOW_BRANCH}/ci/python3.10", + "PYTHON_MAJOR_MINOR_VERSION": "3.10", }, id="Branch variable from sources not from original env", ), diff --git a/dev/pyproject.toml b/dev/pyproject.toml index 48e83b1889497..d81ae70ee33a5 100644 --- a/dev/pyproject.toml +++ b/dev/pyproject.toml @@ -26,7 +26,7 @@ description = "Development tools for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/dev/refresh_images.sh b/dev/refresh_images.sh index 5ac46d74f9af5..46700cd75ee4e 100755 --- a/dev/refresh_images.sh +++ b/dev/refresh_images.sh @@ -26,7 +26,7 @@ export PLATFORM=${PLATFORM:="linux/amd64,linux/arm64"} breeze setup self-upgrade --use-current-airflow-sources -for PYTHON in 3.9 3.10 3.11 3.12 +for PYTHON in 3.10 3.11 3.12 do breeze ci-image build \ --builder airflow_cache \ diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index edb4c6856c7b1..96aad03b8feeb 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -32,7 +32,7 @@ import rich_click as click -PYTHON_VERSIONS = ["3.9", "3.10", "3.11", "3.12"] +PYTHON_VERSIONS = ["3.10", "3.11", "3.12"] GHCR_IO_PREFIX = "ghcr.io" diff --git a/devel-common/src/docs/build_docs.py b/devel-common/src/docs/build_docs.py index f45706f1bd47b..081561ff3badd 100755 --- a/devel-common/src/docs/build_docs.py +++ b/devel-common/src/docs/build_docs.py @@ -29,9 +29,9 @@ import shutil import sys from collections import defaultdict -from collections.abc import Iterable, Sequence +from collections.abc import Callable, Iterable, Sequence from pathlib import Path -from typing import Any, Callable, NamedTuple, TypeVar +from typing import Any, NamedTuple, TypeVar import rich_click as click from click import Choice diff --git a/devel-common/src/sphinx_exts/operators_and_hooks_ref.py b/devel-common/src/sphinx_exts/operators_and_hooks_ref.py index 496a45db09ba5..54d4fe86215ca 100644 --- a/devel-common/src/sphinx_exts/operators_and_hooks_ref.py +++ b/devel-common/src/sphinx_exts/operators_and_hooks_ref.py @@ -287,7 +287,7 @@ def analyze_decorators(node, _file_path, object_type, _class_name=None): if isinstance(child, ast.ClassDef): analyze_decorators(child, file_path, object_type="class") deprecations.extend(_iter_module_for_deprecations(child, file_path, class_name=child.name)) - elif isinstance(child, (ast.FunctionDef, ast.AsyncFunctionDef)): + elif isinstance(child, ast.FunctionDef | ast.AsyncFunctionDef): analyze_decorators( child, file_path, _class_name=class_name, object_type="method" if class_name else "function" ) diff --git a/devel-common/src/sphinx_exts/providers_extensions.py b/devel-common/src/sphinx_exts/providers_extensions.py index 7e040d53870a6..78a0a40df820a 100644 --- a/devel-common/src/sphinx_exts/providers_extensions.py +++ b/devel-common/src/sphinx_exts/providers_extensions.py @@ -20,10 +20,10 @@ import ast import os -from collections.abc import Iterable +from collections.abc import Callable, Iterable from functools import partial from pathlib import Path -from typing import Any, Callable +from typing import Any # No stub exists for docutils.parsers.rst.directives. See https://github.com/python/typeshed/issues/5755. 
from provider_yaml_utils import load_package_data @@ -140,7 +140,7 @@ def get_import_mappings(tree) -> dict[str, str]: """ imports = {} for node in ast.walk(tree): - if isinstance(node, (ast.Import, ast.ImportFrom)): + if isinstance(node, ast.Import | ast.ImportFrom): for alias in node.names: module_prefix = f"{node.module}." if hasattr(node, "module") and node.module else "" imports[alias.asname or alias.name] = f"{module_prefix}{alias.name}" diff --git a/devel-common/src/sphinx_exts/removemarktransform.py b/devel-common/src/sphinx_exts/removemarktransform.py index bb65d026bce93..9561baa9ff43c 100644 --- a/devel-common/src/sphinx_exts/removemarktransform.py +++ b/devel-common/src/sphinx_exts/removemarktransform.py @@ -61,7 +61,7 @@ def is_pycode(node: nodes.literal_block) -> bool: if language == "guess": try: lexer = guess_lexer(node.rawsource) - return isinstance(lexer, (PythonLexer, Python3Lexer)) + return isinstance(lexer, PythonLexer | Python3Lexer) except Exception: pass diff --git a/devel-common/src/sphinx_exts/substitution_extensions.py b/devel-common/src/sphinx_exts/substitution_extensions.py index faa9501ffee77..5fe68e1d55106 100644 --- a/devel-common/src/sphinx_exts/substitution_extensions.py +++ b/devel-common/src/sphinx_exts/substitution_extensions.py @@ -60,7 +60,7 @@ class SubstitutionCodeBlockTransform(SphinxTransform): def apply(self, **kwargs: Any) -> None: def condition(node): - return isinstance(node, (nodes.literal_block, nodes.literal)) + return isinstance(node, nodes.literal_block | nodes.literal) for node in self.document.traverse(condition): if _SUBSTITUTION_OPTION_NAME not in node: diff --git a/devel-common/src/tests_common/_internals/capture_warnings.py b/devel-common/src/tests_common/_internals/capture_warnings.py index cc17ae5cf0a11..784b4174c9207 100644 --- a/devel-common/src/tests_common/_internals/capture_warnings.py +++ b/devel-common/src/tests_common/_internals/capture_warnings.py @@ -24,11 +24,11 @@ import site import sys import warnings -from collections.abc import Generator +from collections.abc import Callable, Generator from contextlib import contextmanager from dataclasses import asdict, dataclass from pathlib import Path -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING import pytest diff --git a/devel-common/src/tests_common/_internals/forbidden_warnings.py b/devel-common/src/tests_common/_internals/forbidden_warnings.py index 6e231160e70b9..77e08c9320f49 100644 --- a/devel-common/src/tests_common/_internals/forbidden_warnings.py +++ b/devel-common/src/tests_common/_internals/forbidden_warnings.py @@ -34,7 +34,7 @@ class ForbiddenWarningsPlugin: def __init__(self, config: pytest.Config, forbidden_warnings: tuple[str, ...]): # Set by a pytest_configure hook in conftest deprecations_ignore = config.inicfg["airflow_deprecations_ignore"] - if isinstance(deprecations_ignore, (str, os.PathLike)): + if isinstance(deprecations_ignore, str | os.PathLike): self.deprecations_ignore = [deprecations_ignore] else: self.deprecations_ignore = deprecations_ignore diff --git a/devel-common/src/tests_common/pytest_plugin.py b/devel-common/src/tests_common/pytest_plugin.py index ba3e209901938..0edee542cd67e 100644 --- a/devel-common/src/tests_common/pytest_plugin.py +++ b/devel-common/src/tests_common/pytest_plugin.py @@ -26,11 +26,11 @@ import subprocess import sys import warnings -from collections.abc import Generator +from collections.abc import Callable, Generator from contextlib import ExitStack, suppress from datetime import datetime, 
timedelta, timezone from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Protocol, TypeVar +from typing import TYPE_CHECKING, Any, Protocol, TypeVar from unittest import mock import pytest diff --git a/devel-common/src/tests_common/test_utils/perf/perf_kit/sqlalchemy.py b/devel-common/src/tests_common/test_utils/perf/perf_kit/sqlalchemy.py index ff17f17b1962b..3ef6ff84fe0b5 100644 --- a/devel-common/src/tests_common/test_utils/perf/perf_kit/sqlalchemy.py +++ b/devel-common/src/tests_common/test_utils/perf/perf_kit/sqlalchemy.py @@ -19,7 +19,7 @@ import os import time import traceback -from typing import Callable +from collections.abc import Callable from sqlalchemy import event diff --git a/devel-common/src/tests_common/test_utils/system_tests.py b/devel-common/src/tests_common/test_utils/system_tests.py index 1ba90d24ec811..0950922308867 100644 --- a/devel-common/src/tests_common/test_utils/system_tests.py +++ b/devel-common/src/tests_common/test_utils/system_tests.py @@ -18,7 +18,8 @@ import logging import os -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING import pytest from tabulate import tabulate diff --git a/generated/PYPI_README.md b/generated/PYPI_README.md index 247aca59627da..def66279f2164 100644 --- a/generated/PYPI_README.md +++ b/generated/PYPI_README.md @@ -58,7 +58,7 @@ Apache Airflow is tested with: | | Main version (dev) | Stable version (3.0.2) | |------------|------------------------|------------------------| -| Python | 3.9, 3.10, 3.11, 3.12 | 3.9, 3.10, 3.11, 3.12 | +| Python | 3.10, 3.11, 3.12 | 3.9, 3.10, 3.11, 3.12 | | Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | | Kubernetes | 1.30, 1.31, 1.32, 1.33 | 1.30, 1.31, 1.32, 1.33 | | PostgreSQL | 13, 14, 15, 16, 17 | 13, 14, 15, 16, 17 | diff --git a/performance/src/performance_dags/performance_dag/performance_dag_utils.py b/performance/src/performance_dags/performance_dag/performance_dag_utils.py index e37264a228f83..7dbf79c0e944d 100644 --- a/performance/src/performance_dags/performance_dag/performance_dag_utils.py +++ b/performance/src/performance_dags/performance_dag/performance_dag_utils.py @@ -24,10 +24,10 @@ import re import tempfile from collections import OrderedDict +from collections.abc import Callable from contextlib import contextmanager from datetime import datetime, timedelta from shutil import copyfile -from typing import Callable import airflow diff --git a/providers-summary-docs/installing-from-pypi.rst b/providers-summary-docs/installing-from-pypi.rst index 86f2cf61e990d..e7ceb98161c36 100644 --- a/providers-summary-docs/installing-from-pypi.rst +++ b/providers-summary-docs/installing-from-pypi.rst @@ -45,6 +45,6 @@ Typical command to install Airflow from PyPI looks like below (you need to use t .. code-block:: - pip install "apache-airflow-providers-celery" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt" + pip install "apache-airflow-providers-celery" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.10.txt" This is an example, see :doc:`apache-airflow:installation/installing-from-pypi` for more examples, including how to upgrade the providers. 
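The other change repeated across the provider modules below is `from typing import Callable` becoming `from collections.abc import Callable`. Since Python 3.9 the `collections.abc` ABCs are subscriptable (PEP 585), so the import move needs no other edits to the annotations and the deprecated `typing` aliases can simply be dropped. A standalone sketch, not part of the patch; `apply_all` is a hypothetical helper used only to show the annotation style:

# Standalone sketch: collections.abc.Callable in annotations (Python 3.9+).
from collections.abc import Callable, Sequence


def apply_all(funcs: Sequence[Callable[[int], int]], value: int) -> list[int]:
    # Hypothetical helper; the point is that Callable[[int], int] subscripts
    # the ABC directly, with no typing.Callable import needed.
    return [func(value) for func in funcs]


print(apply_all([lambda x: x + 1, lambda x: x * 2], 10))  # [11, 20]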
diff --git a/providers/airbyte/pyproject.toml b/providers/airbyte/pyproject.toml index 1c3499c67052d..c9689d8a43185 100644 --- a/providers/airbyte/pyproject.toml +++ b/providers/airbyte/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/alibaba/pyproject.toml b/providers/alibaba/pyproject.toml index bd2a0535e8708..f59305e8deef1 100644 --- a/providers/alibaba/pyproject.toml +++ b/providers/alibaba/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py b/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py index 73b6b33e66992..b9bb25766be44 100644 --- a/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py +++ b/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py @@ -306,8 +306,8 @@ def _validate_list_of_stringables(vals: Sequence[str | int | float]) -> bool: """ if ( vals is None - or not isinstance(vals, (tuple, list)) - or not all(isinstance(val, (str, int, float)) for val in vals) + or not isinstance(vals, tuple | list) + or not all(isinstance(val, str | int | float) for val in vals) ): raise ValueError("List of strings expected") return True @@ -322,7 +322,7 @@ def _validate_extra_conf(conf: dict[Any, Any]) -> bool: if conf: if not isinstance(conf, dict): raise ValueError("'conf' argument must be a dict") - if not all(isinstance(v, (str, int)) and v != "" for v in conf.values()): + if not all(isinstance(v, str | int) and v != "" for v in conf.values()): raise ValueError("'conf' values must be either strings or ints") return True diff --git a/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/maxcompute.py b/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/maxcompute.py index 94de067b07d9b..593f91a5c51c6 100644 --- a/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/maxcompute.py +++ b/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/maxcompute.py @@ -17,7 +17,8 @@ from __future__ import annotations import functools -from typing import TYPE_CHECKING, Any, Callable, TypeVar +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, TypeVar from odps import ODPS diff --git a/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/oss.py b/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/oss.py index 2f6f18d43577a..c70cad5d9a27d 100644 --- 
a/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/oss.py +++ b/providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/oss.py @@ -17,9 +17,10 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from functools import wraps from inspect import signature -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from urllib.parse import urlsplit import oss2 diff --git a/providers/alibaba/tests/unit/alibaba/cloud/utils/test_utils.py b/providers/alibaba/tests/unit/alibaba/cloud/utils/test_utils.py index c4755cd53e879..3772cb1181e39 100644 --- a/providers/alibaba/tests/unit/alibaba/cloud/utils/test_utils.py +++ b/providers/alibaba/tests/unit/alibaba/cloud/utils/test_utils.py @@ -16,9 +16,10 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from functools import wraps from inspect import signature -from typing import Callable, TypeVar, cast +from typing import TypeVar, cast T = TypeVar("T", bound=Callable) diff --git a/providers/amazon/pyproject.toml b/providers/amazon/pyproject.toml index cf9d548c8d11c..df15dae9ec335 100644 --- a/providers/amazon/pyproject.toml +++ b/providers/amazon/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/utils.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/utils.py index c7602d5a92459..f8d7f58062189 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/utils.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/utils.py @@ -25,9 +25,9 @@ import datetime from collections import defaultdict -from collections.abc import Sequence +from collections.abc import Callable, Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from inflection import camelize diff --git a/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py index fa53011f04502..1255f56f0431f 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py @@ -17,8 +17,8 @@ from __future__ import annotations import logging +from collections.abc import Callable from datetime import datetime, timedelta -from typing import Callable from airflow.utils import timezone diff --git a/providers/amazon/src/airflow/providers/amazon/aws/fs/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/fs/s3.py index e0c28d923c4bb..6eec2d90f4e7d 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/fs/s3.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/fs/s3.py @@ -18,8 +18,9 @@ import asyncio import logging +from collections.abc import 
Callable from functools import partial -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any import requests from botocore import UNSIGNED diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py index 79641397bf485..14e20f0775a1d 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py @@ -31,10 +31,11 @@ import logging import os import warnings +from collections.abc import Callable from copy import deepcopy from functools import cached_property, wraps from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, Union +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union import boto3 import botocore @@ -43,6 +44,8 @@ import requests import tenacity from asgiref.sync import sync_to_async +from boto3.resources.base import ServiceResource +from botocore.client import BaseClient from botocore.config import Config from botocore.waiter import Waiter, WaiterModel from dateutil.tz import tzlocal @@ -63,7 +66,16 @@ from airflow.utils.helpers import exactly_one from airflow.utils.log.logging_mixin import LoggingMixin -BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[boto3.client, boto3.resource]) +# We need to set typeignore, sadly without it Sphinx build and mypy don't agree. +# ideally the code should be: +# BaseAwsConnection = TypeVar("BaseAwsConnection", bound=BaseClient | ServiceResource) +# but if we do that Sphinx complains about: +# TypeError: unsupported operand type(s) for |: 'BaseClient' and 'ServiceResource' +# If we change to Union syntax then mypy is not happy with UP007 Use `X | Y` for type annotations +# The only way to workaround it for now is to keep the union syntax with ignore for mypy +# We should try to resolve this later. +BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, ServiceResource]) # type: ignore[operator] # noqa: UP007 + if AIRFLOW_V_3_0_PLUS: from airflow.sdk.exceptions import AirflowRuntimeError @@ -1038,7 +1050,7 @@ def _list_custom_waiters(self) -> list[str]: return WaiterModel(model_config).waiter_names -class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]): +class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]): # type: ignore[operator] # noqa: UP007 """ Base class for interact with AWS. 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py index 0dcedfce08407..fcf7f4c547ceb 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py @@ -30,7 +30,8 @@ import itertools import random import time -from typing import TYPE_CHECKING, Callable, Protocol, runtime_checkable +from collections.abc import Callable +from typing import TYPE_CHECKING, Protocol, runtime_checkable import botocore.client import botocore.exceptions diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.py index 44a39c6f2b30d..b2916d8afd884 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.py @@ -28,9 +28,10 @@ import json import sys +from collections.abc import Callable from copy import deepcopy from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any import botocore.client import botocore.exceptions diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/ec2.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ec2.py index ef4f374eb7b76..7e3af904fdcfb 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/ec2.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ec2.py @@ -19,7 +19,8 @@ import functools import time -from typing import Callable, TypeVar +from collections.abc import Callable +from typing import TypeVar from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/eks.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/eks.py index 210301590ee1e..6a80e20b7a81e 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/eks.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/eks.py @@ -23,11 +23,10 @@ import os import sys import tempfile -from collections.abc import Generator +from collections.abc import Callable, Generator from contextlib import contextmanager from enum import Enum from functools import partial -from typing import Callable from botocore.exceptions import ClientError from botocore.signers import RequestSigner diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/rds.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/rds.py index f58b089d7df1f..0ecba13f63050 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/rds.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/rds.py @@ -20,7 +20,8 @@ from __future__ import annotations import time -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.exceptions import AirflowException, AirflowNotFoundException from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py index 578ba2a2c4646..a432534fab3c3 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py @@ -28,7 +28,7 @@ import re import shutil import time 
-from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Callable from contextlib import suppress from copy import deepcopy from datetime import datetime @@ -37,7 +37,7 @@ from io import BytesIO from pathlib import Path from tempfile import NamedTemporaryFile, gettempdir -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from urllib.parse import urlsplit from uuid import uuid4 @@ -652,7 +652,7 @@ async def get_files_async( response = paginator.paginate(**params) async for page in response: if "Contents" in page: - keys.extend(k for k in page["Contents"] if isinstance(k.get("Size"), (int, float))) + keys.extend(k for k in page["Contents"] if isinstance(k.get("Size"), int | float)) return keys async def _list_keys_async( diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker.py index a785cc5ce83cb..fad70bc4002a9 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker.py @@ -23,10 +23,10 @@ import tempfile import time from collections import Counter, namedtuple -from collections.abc import AsyncGenerator, Generator +from collections.abc import AsyncGenerator, Callable, Generator from datetime import datetime from functools import partial -from typing import Any, Callable, cast +from typing import Any, cast from asgiref.sync import sync_to_async from botocore.exceptions import ClientError diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/sns.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sns.py index 00315fbddc370..275df5b69a907 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/sns.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sns.py @@ -29,7 +29,7 @@ def _get_message_attribute(o): return {"DataType": "Binary", "BinaryValue": o} if isinstance(o, str): return {"DataType": "String", "StringValue": o} - if isinstance(o, (int, float)): + if isinstance(o, int | float): return {"DataType": "Number", "StringValue": str(o)} if hasattr(o, "__iter__"): return {"DataType": "String.Array", "StringValue": json.dumps(o)} diff --git a/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index b5f6aaa12662b..2eab684130c42 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -55,7 +55,7 @@ def json_serialize_legacy(value: Any) -> str | None: :param value: the object to serialize :return: string representation of `value` if it is an instance of datetime or `None` otherwise """ - if isinstance(value, (date, datetime)): + if isinstance(value, date | datetime): return value.isoformat() return None diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py index f486084f34de0..1757614101a19 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py @@ -536,7 +536,7 @@ def execute(self, context: Context): "Either keys or at least one of prefix, from_datetime, to_datetime should be set." 
) - if isinstance(self.keys, (list, str)) and not self.keys: + if isinstance(self.keys, list | str) and not self.keys: return # handle case where dates are strings, specifically when sent as template fields and macros. if isinstance(self.to_datetime, str): diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py index 1653038fbb804..23bcb0e17b28a 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py @@ -20,8 +20,8 @@ import json import time import urllib -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable, ClassVar +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any, ClassVar from botocore.exceptions import ClientError diff --git a/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py index 8a34812eca6a4..844e8ec304832 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py @@ -21,9 +21,9 @@ import inspect import os import re -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, Callable, cast +from typing import TYPE_CHECKING, Any, cast from airflow.configuration import conf from airflow.providers.amazon.aws.utils import validate_execute_complete_event diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py index d7e7a9e967cc5..266648f1dc151 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py @@ -20,13 +20,13 @@ import json import os -from collections.abc import Sequence +from collections.abc import Callable, Sequence from copy import copy from datetime import datetime from decimal import Decimal from functools import cached_property from tempfile import NamedTemporaryFile -from typing import IO, TYPE_CHECKING, Any, Callable +from typing import IO, TYPE_CHECKING, Any from uuid import uuid4 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py index 4e715eba6439f..c96ae1d96c155 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py @@ -20,8 +20,8 @@ from __future__ import annotations import json -from collections.abc import Sequence -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py index 16250d2f78ece..baeceb4387650 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py +++ 
b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py @@ -16,10 +16,10 @@ # under the License. from __future__ import annotations -from collections.abc import Iterable, Sequence +from collections.abc import Callable, Iterable, Sequence from functools import cached_property from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py index 7d1f2656c4f2b..29f58beaf4f58 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py @@ -22,9 +22,7 @@ import io from collections import namedtuple from collections.abc import Iterable, Mapping, Sequence -from typing import TYPE_CHECKING, Any, cast - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Any, Literal, cast from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py index ef2ca7b4cb343..79e6b966bc056 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py @@ -187,7 +187,7 @@ def __post_init__(self, conn: Connection | AwsConnectionWrapper | _ConnectionMet return if TYPE_CHECKING: - assert isinstance(conn, (Connection, _ConnectionMetadata)) + assert isinstance(conn, Connection | _ConnectionMetadata) # Assign attributes from AWS Connection self.conn_id = conn.conn_id diff --git a/providers/amazon/src/airflow/providers/amazon/aws/utils/suppress.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/suppress.py index 908106f0c3165..7105840f4b4f4 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/utils/suppress.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/utils/suppress.py @@ -28,8 +28,9 @@ from __future__ import annotations import logging +from collections.abc import Callable from functools import wraps -from typing import Callable, TypeVar +from typing import TypeVar from airflow.typing_compat import ParamSpec diff --git a/providers/amazon/src/airflow/providers/amazon/aws/utils/waiter.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/waiter.py index 72e03cec5d619..d079d5688a9b7 100644 --- a/providers/amazon/src/airflow/providers/amazon/aws/utils/waiter.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/utils/waiter.py @@ -19,8 +19,8 @@ import logging import time +from collections.abc import Callable from enum import Enum -from typing import Callable from airflow.exceptions import AirflowException diff --git a/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py index ff7b9699c98a0..6d0b0055906e1 100644 --- a/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py +++ b/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py @@ -22,8 +22,8 @@ import os import re import time +from collections.abc import Callable from functools import partial -from typing import Callable from unittest import mock from 
unittest.mock import MagicMock diff --git a/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py b/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py index 5a000c00611f8..dbf589572796c 100644 --- a/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py +++ b/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py @@ -20,7 +20,7 @@ import re from copy import deepcopy from re import Pattern -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING from unit.amazon.aws.utils.eks_test_constants import ( STATUS, @@ -38,7 +38,7 @@ from airflow.providers.amazon.aws.hooks.eks import EksHook -InputTypes = Union[type[ClusterInputs], type[NodegroupInputs], type[FargateProfileInputs]] +InputTypes = type[ClusterInputs] | type[NodegroupInputs] | type[FargateProfileInputs] def attributes_to_test( diff --git a/providers/apache/beam/pyproject.toml b/providers/apache/beam/pyproject.toml index 0b28523ad0094..590bbe6b0429a 100644 --- a/providers/apache/beam/pyproject.toml +++ b/providers/apache/beam/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py b/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py index 2ac4ed172d252..476a5ffc0555a 100644 --- a/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py +++ b/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py @@ -31,7 +31,8 @@ import subprocess import tempfile import textwrap -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from packaging.version import Version diff --git a/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py b/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py index 073f7bb750c30..d1e3bcd411064 100644 --- a/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py +++ b/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py @@ -24,11 +24,11 @@ import stat import tempfile from abc import ABC, ABCMeta, abstractmethod -from collections.abc import Sequence +from collections.abc import Callable, Sequence from concurrent.futures import ThreadPoolExecutor, as_completed from contextlib import ExitStack from functools import partial -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from packaging.version import parse as parse_version diff --git a/providers/apache/cassandra/pyproject.toml b/providers/apache/cassandra/pyproject.toml index 23e836b52ffbb..741413d27c33d 100644 --- a/providers/apache/cassandra/pyproject.toml +++ b/providers/apache/cassandra/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", 
"Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/cassandra/src/airflow/providers/apache/cassandra/hooks/cassandra.py b/providers/apache/cassandra/src/airflow/providers/apache/cassandra/hooks/cassandra.py index 2e95f36b25727..9ba9b9915ed4d 100644 --- a/providers/apache/cassandra/src/airflow/providers/apache/cassandra/hooks/cassandra.py +++ b/providers/apache/cassandra/src/airflow/providers/apache/cassandra/hooks/cassandra.py @@ -20,7 +20,7 @@ from __future__ import annotations import re -from typing import Any, Union +from typing import Any, TypeAlias from cassandra.auth import PlainTextAuthProvider from cassandra.cluster import Cluster, Session @@ -34,7 +34,7 @@ from airflow.hooks.base import BaseHook from airflow.utils.log.logging_mixin import LoggingMixin -Policy = Union[DCAwareRoundRobinPolicy, RoundRobinPolicy, TokenAwarePolicy, WhiteListRoundRobinPolicy] +Policy: TypeAlias = DCAwareRoundRobinPolicy | RoundRobinPolicy | TokenAwarePolicy | WhiteListRoundRobinPolicy class CassandraHook(BaseHook, LoggingMixin): diff --git a/providers/apache/drill/pyproject.toml b/providers/apache/drill/pyproject.toml index dfb78103fd6a2..7e7e5e8ad2b99 100644 --- a/providers/apache/drill/pyproject.toml +++ b/providers/apache/drill/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/druid/pyproject.toml b/providers/apache/druid/pyproject.toml index 8ebcd8a10ad10..7e27df39ace7f 100644 --- a/providers/apache/druid/pyproject.toml +++ b/providers/apache/druid/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/flink/pyproject.toml b/providers/apache/flink/pyproject.toml index d30de00cd9adb..760651c11b053 100644 --- a/providers/apache/flink/pyproject.toml +++ b/providers/apache/flink/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/hdfs/pyproject.toml b/providers/apache/hdfs/pyproject.toml index 5c6030d945fa7..05a00d868a5db 100644 --- a/providers/apache/hdfs/pyproject.toml +++ b/providers/apache/hdfs/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/hive/pyproject.toml b/providers/apache/hive/pyproject.toml index 1fe9dd3db1781..03b114a09d803 100644 --- a/providers/apache/hive/pyproject.toml +++ b/providers/apache/hive/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py b/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py index b3afc1a60c495..2c682ca0d3812 100644 --- a/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py +++ b/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py @@ -25,10 +25,10 @@ import time from collections.abc import Iterable, Mapping from tempfile import NamedTemporaryFile, TemporaryDirectory -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Literal from deprecated import deprecated -from typing_extensions import Literal, overload +from typing_extensions import overload if TYPE_CHECKING: import pandas as pd diff --git a/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive_stats.py b/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive_stats.py index 9b5daf1f64376..1e117de38ede6 100644 --- a/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive_stats.py +++ b/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive_stats.py @@ -18,8 +18,8 @@ from __future__ import annotations import json -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.exceptions import AirflowException from airflow.models import BaseOperator diff --git a/providers/apache/iceberg/pyproject.toml b/providers/apache/iceberg/pyproject.toml index 42452d6fc7ef3..50f453cf53d7d 100644 --- a/providers/apache/iceberg/pyproject.toml +++ b/providers/apache/iceberg/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/impala/pyproject.toml b/providers/apache/impala/pyproject.toml index f90366a36af12..fc14a81bf8743 100644 --- a/providers/apache/impala/pyproject.toml +++ b/providers/apache/impala/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/kafka/pyproject.toml b/providers/apache/kafka/pyproject.toml index cb3cbbbb485c7..be6f78a23a6b6 100644 --- a/providers/apache/kafka/pyproject.toml +++ b/providers/apache/kafka/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py index ac382c2a22be2..3dedbdca3f5c3 100644 --- a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py +++ b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py @@ -16,9 +16,9 @@ # under the License. from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Callable, Sequence from functools import partial -from typing import Any, Callable +from typing import Any from airflow.exceptions import AirflowException from airflow.models import BaseOperator diff --git a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py index 6c538615eafa9..1dfc553f8a1e6 100644 --- a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py +++ b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py @@ -17,9 +17,9 @@ from __future__ import annotations import logging -from collections.abc import Sequence +from collections.abc import Callable, Sequence from functools import partial -from typing import Any, Callable +from typing import Any from airflow.exceptions import AirflowException from airflow.models import BaseOperator diff --git a/providers/apache/kafka/src/airflow/providers/apache/kafka/sensors/kafka.py b/providers/apache/kafka/src/airflow/providers/apache/kafka/sensors/kafka.py index 9c545cd2ee6c5..afe196a29f064 100644 --- a/providers/apache/kafka/src/airflow/providers/apache/kafka/sensors/kafka.py +++ b/providers/apache/kafka/src/airflow/providers/apache/kafka/sensors/kafka.py @@ -16,8 +16,8 @@ # under the License. 
from __future__ import annotations -from collections.abc import Sequence -from typing import Any, Callable +from collections.abc import Callable, Sequence +from typing import Any from airflow.models import BaseOperator from airflow.providers.apache.kafka.triggers.await_message import AwaitMessageTrigger diff --git a/providers/apache/kylin/pyproject.toml b/providers/apache/kylin/pyproject.toml index 71bd30a376ee5..1c5cea4d70248 100644 --- a/providers/apache/kylin/pyproject.toml +++ b/providers/apache/kylin/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/livy/pyproject.toml b/providers/apache/livy/pyproject.toml index 1c477b5d92aae..7850a7f1cc8b0 100644 --- a/providers/apache/livy/pyproject.toml +++ b/providers/apache/livy/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/livy/src/airflow/providers/apache/livy/hooks/livy.py b/providers/apache/livy/src/airflow/providers/apache/livy/hooks/livy.py index d1d25a9a2e3e5..2cf8957b1120b 100644 --- a/providers/apache/livy/src/airflow/providers/apache/livy/hooks/livy.py +++ b/providers/apache/livy/src/airflow/providers/apache/livy/hooks/livy.py @@ -441,8 +441,8 @@ def _validate_list_of_stringables(vals: Sequence[str | int | float]) -> bool: """ if ( vals is None - or not isinstance(vals, (tuple, list)) - or not all(isinstance(val, (str, int, float)) for val in vals) + or not isinstance(vals, tuple | list) + or not all(isinstance(val, str | int | float) for val in vals) ): raise ValueError("List of strings expected") return True @@ -458,7 +458,7 @@ def _validate_extra_conf(conf: dict[Any, Any]) -> bool: if conf: if not isinstance(conf, dict): raise ValueError("'conf' argument must be a dict") - if not all(isinstance(v, (str, int)) and v != "" for v in conf.values()): + if not all(isinstance(v, str | int) and v != "" for v in conf.values()): raise ValueError("'conf' values must be either strings or ints") return True @@ -827,8 +827,8 @@ def _validate_list_of_stringables(vals: Sequence[str | int | float]) -> bool: """ if ( vals is None - or not isinstance(vals, (tuple, list)) - or not all(isinstance(val, (str, int, float)) for val in vals) + or not isinstance(vals, tuple | list) + or not all(isinstance(val, str | int | float) for val in vals) ): raise ValueError("List of strings expected") return True @@ -844,6 +844,6 @@ def _validate_extra_conf(conf: dict[Any, Any]) -> bool: if conf: if not isinstance(conf, dict): raise ValueError("'conf' argument must be a dict") - if not all(isinstance(v, (str, int)) and v != "" for v in conf.values()): + if not all(isinstance(v, str | int) and v != "" for v in conf.values()): raise ValueError("'conf' values must be either strings or ints") return True diff --git a/providers/apache/pig/pyproject.toml b/providers/apache/pig/pyproject.toml index d98fc6a6012e3..274e2d03b5e73 100644 --- a/providers/apache/pig/pyproject.toml +++ b/providers/apache/pig/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/pinot/pyproject.toml b/providers/apache/pinot/pyproject.toml index 4875767690f53..1a1874d5bb762 100644 --- a/providers/apache/pinot/pyproject.toml +++ b/providers/apache/pinot/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/spark/pyproject.toml b/providers/apache/spark/pyproject.toml index 542095930bf51..7b2518d0c6fc2 100644 --- a/providers/apache/spark/pyproject.toml +++ b/providers/apache/spark/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apache/spark/src/airflow/providers/apache/spark/decorators/pyspark.py b/providers/apache/spark/src/airflow/providers/apache/spark/decorators/pyspark.py index 32b1a15d99882..8e93185321f1b 100644 --- a/providers/apache/spark/src/airflow/providers/apache/spark/decorators/pyspark.py +++ b/providers/apache/spark/src/airflow/providers/apache/spark/decorators/pyspark.py @@ -18,8 +18,8 @@ from __future__ import annotations import inspect -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.providers.apache.spark.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/apache/tinkerpop/pyproject.toml b/providers/apache/tinkerpop/pyproject.toml index 5103986eac1a9..363a964e13b5b 100644 --- a/providers/apache/tinkerpop/pyproject.toml +++ b/providers/apache/tinkerpop/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/apprise/pyproject.toml b/providers/apprise/pyproject.toml index 6d45b938648ed..5675cf21bb5ce 100644 --- a/providers/apprise/pyproject.toml +++ b/providers/apprise/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/arangodb/pyproject.toml b/providers/arangodb/pyproject.toml index 0f4a7e0f9fd51..af1e6aff3bc2c 100644 --- a/providers/arangodb/pyproject.toml +++ b/providers/arangodb/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/arangodb/src/airflow/providers/arangodb/operators/arangodb.py b/providers/arangodb/src/airflow/providers/arangodb/operators/arangodb.py index 6514f756bd208..50a3421dfe73d 100644 --- a/providers/arangodb/src/airflow/providers/arangodb/operators/arangodb.py +++ b/providers/arangodb/src/airflow/providers/arangodb/operators/arangodb.py @@ -17,8 +17,8 @@ # under the License. from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.exceptions import AirflowException from airflow.models import BaseOperator diff --git a/providers/asana/pyproject.toml b/providers/asana/pyproject.toml index 8fe4eaabad303..4d11a30d7bb84 100644 --- a/providers/asana/pyproject.toml +++ b/providers/asana/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/atlassian/jira/pyproject.toml b/providers/atlassian/jira/pyproject.toml index b8993b4d4c98f..5696c2ff19f99 100644 --- a/providers/atlassian/jira/pyproject.toml +++ b/providers/atlassian/jira/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/atlassian/jira/src/airflow/providers/atlassian/jira/operators/jira.py b/providers/atlassian/jira/src/airflow/providers/atlassian/jira/operators/jira.py index fde6222c208a9..d1a2fe6f06a55 100644 --- a/providers/atlassian/jira/src/airflow/providers/atlassian/jira/operators/jira.py +++ b/providers/atlassian/jira/src/airflow/providers/atlassian/jira/operators/jira.py @@ -17,8 +17,8 @@ # under the License. 
from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.models import BaseOperator from airflow.providers.atlassian.jira.hooks.jira import JiraHook diff --git a/providers/atlassian/jira/src/airflow/providers/atlassian/jira/sensors/jira.py b/providers/atlassian/jira/src/airflow/providers/atlassian/jira/sensors/jira.py index ee8bfaa301407..e81c96b78e6f0 100644 --- a/providers/atlassian/jira/src/airflow/providers/atlassian/jira/sensors/jira.py +++ b/providers/atlassian/jira/src/airflow/providers/atlassian/jira/sensors/jira.py @@ -17,8 +17,8 @@ # under the License. from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.providers.atlassian.jira.hooks.jira import JiraHook from airflow.providers.atlassian.jira.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/celery/pyproject.toml b/providers/celery/pyproject.toml index 555d33f6fa4fe..37421f947fd7d 100644 --- a/providers/celery/pyproject.toml +++ b/providers/celery/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py b/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py index 6bef6ae9e3e81..0259d60e35d34 100644 --- a/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py +++ b/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py @@ -32,7 +32,7 @@ import warnings from collections.abc import Mapping, MutableMapping, Sequence from concurrent.futures import ProcessPoolExecutor -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any from celery import Celery, Task, states as celery_states from celery.backends.base import BaseKeyValueStoreBackend @@ -71,11 +71,9 @@ # the type as the union of both kinds CommandType = Sequence[str] - TaskInstanceInCelery: TypeAlias = tuple[ - TaskInstanceKey, Union[workloads.All, CommandType], Optional[str], Task - ] + TaskInstanceInCelery: TypeAlias = tuple[TaskInstanceKey, workloads.All | CommandType, str | None, Task] - TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]] + TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None] OPERATION_TIMEOUT = conf.getfloat("celery", "operation_timeout") diff --git a/providers/cloudant/provider.yaml b/providers/cloudant/provider.yaml index 785623450fe75..f845c7f324ba6 100644 --- a/providers/cloudant/provider.yaml +++ b/providers/cloudant/provider.yaml @@ -54,13 +54,6 @@ versions: - 1.0.1 - 1.0.0 -excluded-python-versions: - # ibmcloudant transitively brings in urllib3 2.x, but the snowflake provider has a dependency that pins - # urllib3 to 1.x on Python 3.9; thus we 
exclude those Python versions from taking the update - # to ibmcloudant. - # See #21004, #41555, and https://github.com/snowflakedb/snowflake-connector-python/issues/2016 - - "3.9" - integrations: - integration-name: IBM Cloudant external-doc-url: https://www.ibm.com/cloud/cloudant diff --git a/providers/cloudant/pyproject.toml b/providers/cloudant/pyproject.toml index e4243de8fbd88..e247f8da60629 100644 --- a/providers/cloudant/pyproject.toml +++ b/providers/cloudant/pyproject.toml @@ -49,7 +49,7 @@ classifiers = [ "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9,!=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/cloudant/src/airflow/providers/cloudant/hooks/cloudant.py b/providers/cloudant/src/airflow/providers/cloudant/hooks/cloudant.py index b09ad6932efa6..35cb6064b9ef9 100644 --- a/providers/cloudant/src/airflow/providers/cloudant/hooks/cloudant.py +++ b/providers/cloudant/src/airflow/providers/cloudant/hooks/cloudant.py @@ -19,13 +19,9 @@ from __future__ import annotations -import sys from typing import TYPE_CHECKING, Any -if sys.version_info < (3, 10): - from airflow.providers.cloudant.cloudant_fake import CloudantV1, CouchDbSessionAuthenticator -else: - from ibmcloudant import CloudantV1, CouchDbSessionAuthenticator +from ibmcloudant import CloudantV1, CouchDbSessionAuthenticator from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/providers/cloudant/tests/unit/cloudant/hooks/test_cloudant.py b/providers/cloudant/tests/unit/cloudant/hooks/test_cloudant.py index 072c93c79528b..d8dab6f88edd7 100644 --- a/providers/cloudant/tests/unit/cloudant/hooks/test_cloudant.py +++ b/providers/cloudant/tests/unit/cloudant/hooks/test_cloudant.py @@ -17,24 +17,15 @@ # under the License. from __future__ import annotations -import sys from unittest.mock import patch import pytest from airflow.exceptions import AirflowException from airflow.models import Connection +from airflow.providers.cloudant.hooks.cloudant import CloudantHook -pytestmark = [] - -if sys.version_info < (3, 10): - pytestmark.append( - pytest.mark.skip( - f"Skipping {__name__} as the cloudant provider is not supported on Python 3.9, see #41555." - ) - ) -else: - from airflow.providers.cloudant.hooks.cloudant import CloudantHook +pytestmark: list[pytest.Mark] = [] class TestCloudantHook: diff --git a/providers/cncf/kubernetes/pyproject.toml b/providers/cncf/kubernetes/pyproject.toml index 79baa43ea9341..f2a495b1f0cee 100644 --- a/providers/cncf/kubernetes/pyproject.toml +++ b/providers/cncf/kubernetes/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/callbacks.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/callbacks.py index d87e8065dbd1a..20ea0d02a8846 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/callbacks.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/callbacks.py @@ -17,7 +17,7 @@ from __future__ import annotations from enum import Enum -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING, TypeAlias import kubernetes.client as k8s import kubernetes_asyncio.client as async_k8s @@ -26,7 +26,7 @@ from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator from airflow.utils.context import Context -client_type = Union[k8s.CoreV1Api, async_k8s.CoreV1Api] +client_type: TypeAlias = k8s.CoreV1Api | async_k8s.CoreV1Api class ExecutionMode(str, Enum): diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py index 3690ff0d6cd44..eb4752c0ffb07 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py @@ -19,10 +19,10 @@ import base64 import os import pickle -from collections.abc import Sequence +from collections.abc import Callable, Sequence from shlex import quote from tempfile import TemporaryDirectory -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING import dill from kubernetes.client import models as k8s diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py index a65efad1ae6cf..78e89d44cde02 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/decorators/kubernetes_cmd.py @@ -17,8 +17,8 @@ from __future__ import annotations import warnings -from collections.abc import Sequence -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py index 077ac422d4228..78ab517c52677 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py @@ -16,7 +16,7 @@ # under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any ADOPTED = "adopted" if TYPE_CHECKING: @@ -29,13 +29,13 @@ CommandType = Sequence[str] # TaskInstance key, command, configuration, pod_template_file - KubernetesJobType = tuple[TaskInstanceKey, CommandType, Any, Optional[str]] + KubernetesJobType = tuple[TaskInstanceKey, CommandType, Any, str | None] # key, pod state, pod_name, namespace, resource_version - KubernetesResultsType = tuple[TaskInstanceKey, Optional[Union[TaskInstanceState, str]], str, str, str] + KubernetesResultsType = tuple[TaskInstanceKey, TaskInstanceState | str | None, str, str, str] # pod_name, namespace, pod state, annotations, resource_version - KubernetesWatchType = tuple[str, str, Optional[Union[TaskInstanceState, str]], dict[str, str], str] + KubernetesWatchType = tuple[str, str, TaskInstanceState | str | None, dict[str, str], str] ALL_NAMESPACES = "ALL_NAMESPACES" POD_EXECUTOR_DONE_KEY = "airflow_executor_done" diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/kube_client.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/kube_client.py index 3a5c099f59518..cc37ed755a61d 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/kube_client.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/kube_client.py @@ -87,8 +87,17 @@ def _enable_tcp_keepalive() -> None: else: log.debug("Unable to set TCP_KEEPCNT on this platform") - HTTPSConnection.default_socket_options = HTTPSConnection.default_socket_options + socket_options - HTTPConnection.default_socket_options = HTTPConnection.default_socket_options + socket_options + # Cast both the default options and our socket options + socket_options_cast: list[tuple[int, int, int | bytes]] = [ + (level, opt, val) for level, opt, val in socket_options + ] + default_options_cast: list[tuple[int, int, int | bytes]] = [ + (level, opt, val) for level, opt, val in HTTPSConnection.default_socket_options + ] + + # Then use the cast versions for both HTTPS and HTTP + HTTPSConnection.default_socket_options = default_options_cast + socket_options_cast + HTTPConnection.default_socket_options = default_options_cast + socket_options_cast def get_kube_client( diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/operators/pod.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/operators/pod.py index b5067e7a17bf5..ed8511be98deb 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/operators/pod.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/operators/pod.py @@ -27,11 +27,11 @@ import re import shlex import string -from collections.abc import Container, Iterable, Sequence +from collections.abc import Callable, Container, Iterable, Sequence from contextlib import AbstractContextManager from enum import Enum from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, Literal +from typing import TYPE_CHECKING, Any, Literal import kubernetes import tenacity diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py index d66eb9fc80bc0..5102847ecc429 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py +++ 
b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py @@ -16,8 +16,7 @@ # under the License. from __future__ import annotations -from collections.abc import Iterator -from typing import Callable +from collections.abc import Callable, Iterator from kubernetes.utils import FailToCreateError diff --git a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py index 5942b68afa89e..12a10e0dad397 100644 --- a/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py +++ b/providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py @@ -27,7 +27,7 @@ from contextlib import closing, suppress from dataclasses import dataclass from datetime import timedelta -from typing import TYPE_CHECKING, Protocol, cast +from typing import TYPE_CHECKING, Literal, Protocol, cast import pendulum import tenacity @@ -36,7 +36,6 @@ from kubernetes.stream import stream as kubernetes_stream from pendulum import DateTime from pendulum.parsing.exceptions import ParserError -from typing_extensions import Literal from urllib3.exceptions import HTTPError, TimeoutError from airflow.exceptions import AirflowException diff --git a/providers/cncf/kubernetes/tests/unit/cncf/kubernetes/decorators/test_kubernetes_commons.py b/providers/cncf/kubernetes/tests/unit/cncf/kubernetes/decorators/test_kubernetes_commons.py index 015486157e0ae..860cf3294a9ea 100644 --- a/providers/cncf/kubernetes/tests/unit/cncf/kubernetes/decorators/test_kubernetes_commons.py +++ b/providers/cncf/kubernetes/tests/unit/cncf/kubernetes/decorators/test_kubernetes_commons.py @@ -17,7 +17,7 @@ from __future__ import annotations import asyncio -from typing import Callable +from collections.abc import Callable from unittest import mock import pytest diff --git a/providers/cohere/pyproject.toml b/providers/cohere/pyproject.toml index bd2d6ff525ce0..7e2aa17bf12df 100644 --- a/providers/cohere/pyproject.toml +++ b/providers/cohere/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/common/compat/pyproject.toml b/providers/common/compat/pyproject.toml index f284d1109879f..5e19191ad0c94 100644 --- a/providers/common/compat/pyproject.toml +++ b/providers/common/compat/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
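The k8s_resource_iterator, pod_manager and decorator-test hunks above show two further clean-ups enabled by the 3.10 floor: Callable is imported from collections.abc rather than typing, and Literal comes from the standard typing module instead of typing_extensions. A small sketch of the resulting import style (hypothetical function, not taken from the providers):

    from __future__ import annotations

    from collections.abc import Callable  # preferred over the deprecated typing.Callable
    from typing import Literal            # in the stdlib since 3.8, no typing_extensions needed

    def fetch(handler: Callable[[str], int], mode: Literal["strict", "lenient"] = "strict") -> int:
        # collections.abc.Callable is subscriptable at runtime on Python 3.9+.
        return handler("42") if mode == "strict" else 0

    print(fetch(int))  # prints 42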
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/common/io/pyproject.toml b/providers/common/io/pyproject.toml index 0fbb4c4cd6c83..0f8c9c19c48bd 100644 --- a/providers/common/io/pyproject.toml +++ b/providers/common/io/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/common/messaging/pyproject.toml b/providers/common/messaging/pyproject.toml index dd7dd73c5cd66..873ae6b503f53 100644 --- a/providers/common/messaging/pyproject.toml +++ b/providers/common/messaging/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/common/sql/pyproject.toml b/providers/common/sql/pyproject.toml index 95d6ac9158ba8..5b5d8e743d0f2 100644 --- a/providers/common/sql/pyproject.toml +++ b/providers/common/sql/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.py b/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.py index 88453c73a33e5..6380f4091fd67 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.py +++ b/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.py @@ -17,8 +17,8 @@ from __future__ import annotations import re -from collections.abc import Iterable, Mapping -from typing import TYPE_CHECKING, Any, Callable, TypeVar +from collections.abc import Callable, Iterable, Mapping +from typing import TYPE_CHECKING, Any, TypeVar from methodtools import lru_cache diff --git a/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.pyi b/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.pyi index ff208e77e5c82..7ab9a3efa0f6f 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.pyi +++ b/providers/common/sql/src/airflow/providers/common/sql/dialects/dialect.pyi @@ -32,8 +32,8 @@ Definition of the public interface for airflow.providers.common.sql.src.airflow. isort:skip_file """ -from collections.abc import Iterable, Mapping -from typing import Any, Callable, TypeVar +from collections.abc import Callable, Iterable, Mapping +from typing import Any, TypeVar from _typeshed import Incomplete as Incomplete from sqlalchemy.engine import Inspector as Inspector diff --git a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py index 58e9a244108ed..d0cf632cf1b17 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py +++ b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py @@ -18,11 +18,11 @@ import contextlib import warnings -from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence +from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence from contextlib import closing, contextmanager, suppress from datetime import datetime from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, Protocol, TypeVar, cast, overload +from typing import TYPE_CHECKING, Any, Literal, Protocol, TypeVar, cast, overload from urllib.parse import urlparse import sqlparse @@ -32,7 +32,6 @@ from sqlalchemy import create_engine, inspect from sqlalchemy.engine import make_url from sqlalchemy.exc import ArgumentError, NoSuchModuleError -from typing_extensions import Literal from airflow.configuration import conf from airflow.exceptions import ( diff --git a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.pyi b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.pyi index a5caa463e47de..9c792e1bd82ea 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.pyi +++ b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.pyi @@ -32,9 +32,9 @@ Definition of the public interface for airflow.providers.common.sql.src.airflow. 
isort:skip_file """ -from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence +from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence from functools import cached_property as cached_property -from typing import Any, Callable, Literal, Protocol, TypeVar, overload +from typing import Any, Literal, Protocol, TypeVar, overload from _typeshed import Incomplete as Incomplete from pandas import DataFrame as PandasDataFrame diff --git a/providers/common/sql/src/airflow/providers/common/sql/operators/sql.py b/providers/common/sql/src/airflow/providers/common/sql/operators/sql.py index ddf1a8b5ce6d3..af04349532a51 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/operators/sql.py +++ b/providers/common/sql/src/airflow/providers/common/sql/operators/sql.py @@ -19,9 +19,9 @@ import ast import re -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Callable, Iterable, Mapping, Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, ClassVar, NoReturn, SupportsAbs +from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs from airflow.exceptions import AirflowException, AirflowFailException from airflow.hooks.base import BaseHook diff --git a/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.py b/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.py index 2c7cb4543694b..0aa83fef2e5b8 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.py +++ b/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.py @@ -16,9 +16,9 @@ # under the License. from __future__ import annotations -from collections.abc import Mapping, Sequence +from collections.abc import Callable, Mapping, Sequence from operator import itemgetter -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.pyi b/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.pyi index e2d69b6c0b952..edd3dce2326a3 100644 --- a/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.pyi +++ b/providers/common/sql/src/airflow/providers/common/sql/sensors/sql.pyi @@ -32,8 +32,8 @@ Definition of the public interface for airflow.providers.common.sql.src.airflow. isort:skip_file """ -from collections.abc import Mapping, Sequence -from typing import Any, Callable +from collections.abc import Callable, Mapping, Sequence +from typing import Any from _typeshed import Incomplete as Incomplete diff --git a/providers/databricks/pyproject.toml b/providers/databricks/pyproject.toml index 2da0ace112770..632dd6bf8bf4c 100644 --- a/providers/databricks/pyproject.toml +++ b/providers/databricks/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py index 2829c7012d32b..cba505f9988a8 100644 --- a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py +++ b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py @@ -766,7 +766,7 @@ def _get_error_code(exception: BaseException) -> str: @staticmethod def _retryable_error(exception: BaseException) -> bool: if isinstance(exception, requests_exceptions.RequestException): - if isinstance(exception, (requests_exceptions.ConnectionError, requests_exceptions.Timeout)) or ( + if isinstance(exception, requests_exceptions.ConnectionError | requests_exceptions.Timeout) or ( exception.response is not None and ( exception.response.status_code >= 500 @@ -783,7 +783,7 @@ def _retryable_error(exception: BaseException) -> bool: if exception.status >= 500 or exception.status == 429: return True - if isinstance(exception, (ClientConnectorError, TimeoutError)): + if isinstance(exception, ClientConnectorError | TimeoutError): return True return False diff --git a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py index 11e5ceca39bea..76aea1dde5434 100644 --- a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py +++ b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py @@ -18,14 +18,13 @@ import threading from collections import namedtuple -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Callable, Iterable, Mapping, Sequence from contextlib import closing from copy import copy from datetime import timedelta from typing import ( TYPE_CHECKING, Any, - Callable, TypeVar, cast, overload, diff --git a/providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py index 6aad4f419bcad..86ec7ca385d05 100644 --- a/providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py +++ b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py @@ -20,10 +20,10 @@ from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from databricks.sql.utils import ParamEscaper @@ -205,11 +205,11 @@ def _generate_partition_query( if isinstance(partition_value, list): output_list.append(f"""{partition_col} in {tuple(partition_value)}""") self.log.debug("List formatting for partitions: %s", output_list) - if isinstance(partition_value, (int, float, complex)): + if isinstance(partition_value, int | float | complex): output_list.append( f"""{partition_col}{self.partition_operator}{self.escaper.escape_item(partition_value)}""" ) - if isinstance(partition_value, (str, datetime)): + if isinstance(partition_value, str | datetime): output_list.append( f"""{partition_col}{self.partition_operator}{self.escaper.escape_item(partition_value)}""" ) diff --git a/providers/databricks/src/airflow/providers/databricks/sensors/databricks_sql.py 
b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_sql.py index d0f060c8487a8..1eca4c7adae4b 100644 --- a/providers/databricks/src/airflow/providers/databricks/sensors/databricks_sql.py +++ b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_sql.py @@ -20,9 +20,9 @@ from __future__ import annotations -from collections.abc import Iterable, Sequence +from collections.abc import Callable, Iterable, Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.exceptions import AirflowException from airflow.providers.common.sql.hooks.handlers import fetch_all_handler diff --git a/providers/databricks/src/airflow/providers/databricks/utils/databricks.py b/providers/databricks/src/airflow/providers/databricks/utils/databricks.py index e19bccf290a36..8159bce1aff32 100644 --- a/providers/databricks/src/airflow/providers/databricks/utils/databricks.py +++ b/providers/databricks/src/airflow/providers/databricks/utils/databricks.py @@ -34,12 +34,12 @@ def normalise_json_content(content, json_path: str = "json") -> str | bool | lis to string type because databricks does not understand 'True' or 'False' values. """ normalise = normalise_json_content - if isinstance(content, (str, bool)): + if isinstance(content, str | bool): return content - if isinstance(content, (int, float)): + if isinstance(content, int | float): # Databricks can tolerate either numeric or string types in the API backend. return str(content) - if isinstance(content, (list, tuple)): + if isinstance(content, list | tuple): return [normalise(e, f"{json_path}[{i}]") for i, e in enumerate(content)] if isinstance(content, dict): return {k: normalise(v, f"{json_path}[{k}]") for k, v in content.items()} diff --git a/providers/datadog/pyproject.toml b/providers/datadog/pyproject.toml index 2f9770782c270..2932cb651805c 100644 --- a/providers/datadog/pyproject.toml +++ b/providers/datadog/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/datadog/src/airflow/providers/datadog/sensors/datadog.py b/providers/datadog/src/airflow/providers/datadog/sensors/datadog.py index 65768936118bc..251020febb871 100644 --- a/providers/datadog/src/airflow/providers/datadog/sensors/datadog.py +++ b/providers/datadog/src/airflow/providers/datadog/sensors/datadog.py @@ -17,7 +17,8 @@ # under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from datadog import api diff --git a/providers/dbt/cloud/pyproject.toml b/providers/dbt/cloud/pyproject.toml index 62c941133dab1..2cb0c972bc2b6 100644 --- a/providers/dbt/cloud/pyproject.toml +++ b/providers/dbt/cloud/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py b/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py index 2f5ee59b2d27b..e9c37d926dbcb 100644 --- a/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py +++ b/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py @@ -20,11 +20,11 @@ import json import time import warnings -from collections.abc import Sequence +from collections.abc import Callable, Sequence from enum import Enum from functools import cached_property, wraps from inspect import signature -from typing import TYPE_CHECKING, Any, Callable, TypedDict, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypedDict, TypeVar, cast import aiohttp from asgiref.sync import sync_to_async @@ -117,7 +117,7 @@ class DbtCloudJobRunStatus(Enum): @classmethod def check_is_valid(cls, statuses: int | Sequence[int] | set[int]): """Validate input statuses are a known value.""" - if isinstance(statuses, (Sequence, set)): + if isinstance(statuses, Sequence | set): for status in statuses: cls(status) else: diff --git a/providers/dingding/pyproject.toml b/providers/dingding/pyproject.toml index 63cd02a22ded5..0e3005a499cf4 100644 --- a/providers/dingding/pyproject.toml +++ b/providers/dingding/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/discord/pyproject.toml b/providers/discord/pyproject.toml index 986b2668f5198..53b53aa379bd1 100644 --- a/providers/discord/pyproject.toml +++ b/providers/discord/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/docker/pyproject.toml b/providers/docker/pyproject.toml index 529b9756b527c..36dd840744b0e 100644 --- a/providers/docker/pyproject.toml +++ b/providers/docker/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/docker/src/airflow/providers/docker/decorators/docker.py b/providers/docker/src/airflow/providers/docker/decorators/docker.py index 36451a3d46395..807902d5e8170 100644 --- a/providers/docker/src/airflow/providers/docker/decorators/docker.py +++ b/providers/docker/src/airflow/providers/docker/decorators/docker.py @@ -18,9 +18,9 @@ import base64 import os -from collections.abc import Sequence +from collections.abc import Callable, Sequence from tempfile import TemporaryDirectory -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.providers.docker.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/docker/src/airflow/providers/docker/operators/docker.py b/providers/docker/src/airflow/providers/docker/operators/docker.py index ba2a7adef9151..7c75bb926e9ec 100644 --- a/providers/docker/src/airflow/providers/docker/operators/docker.py +++ b/providers/docker/src/airflow/providers/docker/operators/docker.py @@ -27,13 +27,12 @@ from functools import cached_property from io import BytesIO, StringIO from tempfile import TemporaryDirectory -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Literal from docker.constants import DEFAULT_TIMEOUT_SECONDS from docker.errors import APIError from docker.types import LogConfig, Mount, Ulimit from dotenv import dotenv_values -from typing_extensions import Literal from airflow.models import BaseOperator from airflow.providers.docker.exceptions import ( diff --git a/providers/edge3/pyproject.toml b/providers/edge3/pyproject.toml index 302d6f2afafd8..e5f4165d970a9 100644 --- a/providers/edge3/pyproject.toml +++ b/providers/edge3/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/edge3/src/airflow/providers/edge3/example_dags/win_test.py b/providers/edge3/src/airflow/providers/edge3/example_dags/win_test.py index 27563452703d2..8de7b50201d2c 100644 --- a/providers/edge3/src/airflow/providers/edge3/example_dags/win_test.py +++ b/providers/edge3/src/airflow/providers/edge3/example_dags/win_test.py @@ -26,11 +26,11 @@ from __future__ import annotations import os -from collections.abc import Container, Sequence +from collections.abc import Callable, Container, Sequence from datetime import datetime from subprocess import STDOUT, Popen from time import sleep -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.decorators import task, task_group from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowSkipException diff --git a/providers/edge3/src/airflow/providers/edge3/executors/edge_executor.py b/providers/edge3/src/airflow/providers/edge3/executors/edge_executor.py index 37659bfc17e14..827e03498f299 100644 --- a/providers/edge3/src/airflow/providers/edge3/executors/edge_executor.py +++ b/providers/edge3/src/airflow/providers/edge3/executors/edge_executor.py @@ -21,7 +21,7 @@ from collections.abc import Sequence from copy import deepcopy from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from sqlalchemy import delete, inspect, text from sqlalchemy.exc import NoSuchTableError @@ -52,7 +52,7 @@ # TODO: Airflow 2 type hints; remove when Airflow 2 support is removed CommandType = Sequence[str] # Task tuple to send to be executed - TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]] + TaskTuple = tuple[TaskInstanceKey, CommandType, str | None, Any | None] PARALLELISM: int = conf.getint("core", "PARALLELISM") diff --git a/providers/edge3/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py b/providers/edge3/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py index 2da0f0e23facc..73378d0e31cd0 100644 --- a/providers/edge3/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py +++ b/providers/edge3/src/airflow/providers/edge3/worker_api/routes/_v2_compat.py @@ -35,7 +35,7 @@ def parse_command(command: str) -> ExecuteTask: return ExecuteTask.model_validate_json(command) else: # Mock the external dependencies - from typing import Callable + from collections.abc import Callable from connexion import ProblemException diff --git a/providers/elasticsearch/pyproject.toml b/providers/elasticsearch/pyproject.toml index 260c466b8695e..c77c0f1e983dd 100644 --- a/providers/elasticsearch/pyproject.toml +++ b/providers/elasticsearch/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
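As with the Celery executor earlier, the edge executor's TaskTuple alias above drops Optional and Union in favour of the | operator, which Python 3.10 also allows in runtime-evaluated aliases. A compact sketch of the alias style, with made-up member types standing in for the executor's real ones:

    from __future__ import annotations

    from typing import Any, TypeAlias

    CommandType: TypeAlias = list[str]
    # str | None replaces Optional[str]; Any | None replaces Optional[Any].
    TaskTuple: TypeAlias = tuple[str, CommandType, str | None, Any | None]

    task: TaskTuple = ("task_key", ["airflow", "tasks", "run"], None, None)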
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_response.py b/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_response.py index 610b03f96e199..221a45fdb3b94 100644 --- a/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_response.py +++ b/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_response.py @@ -130,7 +130,7 @@ def __iter__(self) -> Iterator[Hit]: def __getitem__(self, key): """Retrieve a specific hit or a slice of hits from the Elasticsearch response.""" - if isinstance(key, (slice, int)): + if isinstance(key, slice | int): return self.hits[key] return super().__getitem__(key) diff --git a/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py b/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py index ee5fc205902b2..3cd8fa5e56766 100644 --- a/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py +++ b/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py @@ -27,8 +27,9 @@ import sys import time from collections import defaultdict +from collections.abc import Callable from operator import attrgetter -from typing import TYPE_CHECKING, Any, Callable, Literal +from typing import TYPE_CHECKING, Any, Literal from urllib.parse import quote, urlparse # Using `from elasticsearch import *` would break elasticsearch mocking used in unit test. @@ -57,11 +58,9 @@ from airflow.models.taskinstance import TaskInstance, TaskInstanceKey if AIRFLOW_V_3_0_PLUS: - from typing import Union - from airflow.utils.log.file_task_handler import StructuredLogMessage - EsLogMsgType = Union[list[StructuredLogMessage], str] + EsLogMsgType = list[StructuredLogMessage] | str else: EsLogMsgType = list[tuple[str, str]] # type: ignore[misc] diff --git a/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/utilities/__init__.py b/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/utilities/__init__.py index 50b883e0f02f3..abd3606b1e056 100644 --- a/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/utilities/__init__.py +++ b/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/utilities/__init__.py @@ -177,7 +177,7 @@ def _base64_auth_header(auth_value): and returns a base64-encoded string to be used as an HTTP authorization header. 
""" - if isinstance(auth_value, (list, tuple)): + if isinstance(auth_value, list | tuple): auth_value = base64.b64encode(to_bytes(":".join(auth_value))) return to_str(auth_value) @@ -189,11 +189,11 @@ def _escape(value): """ # make sequences into comma-separated strings - if isinstance(value, (list, tuple)): + if isinstance(value, list | tuple): value = ",".join(value) # dates and datetimes into isoformat - elif isinstance(value, (date, datetime)): + elif isinstance(value, date | datetime): value = value.isoformat() # make bools into true/false strings diff --git a/providers/exasol/pyproject.toml b/providers/exasol/pyproject.toml index f6987a182cd48..158befc0c809c 100644 --- a/providers/exasol/pyproject.toml +++ b/providers/exasol/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py b/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py index cbd59bfb294f6..bf996cf096a40 100644 --- a/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py +++ b/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Callable, Iterable, Mapping, Sequence from contextlib import closing -from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload +from typing import TYPE_CHECKING, Any, TypeVar, overload import pyexasol from deprecated import deprecated diff --git a/providers/fab/pyproject.toml b/providers/fab/pyproject.toml index bf16575fd921f..c339a9681b754 100644 --- a/providers/fab/pyproject.toml +++ b/providers/fab/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" license-files = ["NOTICE", "*/LICENSE*"] # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py b/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py index bf48beff41c9a..dddb13646e53f 100644 --- a/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py +++ b/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py @@ -18,8 +18,9 @@ from __future__ import annotations +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from flask import Response, current_app, request from flask_appbuilder.const import AUTH_LDAP diff --git a/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py b/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py index d419524bf1043..41341081061b7 100644 --- a/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py +++ b/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py @@ -19,8 +19,9 @@ import logging import os +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar, cast +from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar, cast import kerberos from flask import Response, current_app, g, make_response, request diff --git a/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py b/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py index 8b39c0e8f775c..75051decef738 100644 --- a/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py +++ b/providers/fab/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py @@ -18,8 +18,9 @@ from __future__ import annotations +from collections.abc import Callable from functools import wraps -from typing import Any, Callable, TypeVar, cast +from typing import Any, TypeVar, cast from flask import Response diff --git a/providers/fab/src/airflow/providers/fab/www/api_connexion/parameters.py b/providers/fab/src/airflow/providers/fab/www/api_connexion/parameters.py index f60f2dce23bdc..6d1c7324b9c1a 100644 --- a/providers/fab/src/airflow/providers/fab/www/api_connexion/parameters.py +++ b/providers/fab/src/airflow/providers/fab/www/api_connexion/parameters.py @@ -17,9 +17,9 @@ from __future__ import annotations import logging -from collections.abc import Container +from collections.abc import Callable, Container from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from pendulum.parsing import ParserError from sqlalchemy import text diff --git a/providers/fab/src/airflow/providers/fab/www/api_connexion/security.py b/providers/fab/src/airflow/providers/fab/www/api_connexion/security.py index f238e9de9b356..b1ed503e52b34 100644 --- a/providers/fab/src/airflow/providers/fab/www/api_connexion/security.py +++ b/providers/fab/src/airflow/providers/fab/www/api_connexion/security.py @@ -16,8 +16,9 @@ # under the License. 
from __future__ import annotations +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from flask import Response, current_app diff --git a/providers/fab/src/airflow/providers/fab/www/api_connexion/types.py b/providers/fab/src/airflow/providers/fab/www/api_connexion/types.py index f17f2a0d2712b..706436a807417 100644 --- a/providers/fab/src/airflow/providers/fab/www/api_connexion/types.py +++ b/providers/fab/src/airflow/providers/fab/www/api_connexion/types.py @@ -17,14 +17,12 @@ from __future__ import annotations from collections.abc import Mapping, Sequence -from typing import Any, Optional, Union +from typing import Any from flask import Response -APIResponse = Union[ - Response, - tuple[object, int], # For '(NoContent, 201)'. - Mapping[str, Any], # JSON. -] +# tuple[object, int] For '(NoContent, 201)'. +# Mapping[str, Any] for json +APIResponse = Response | tuple[object, int] | Mapping[str, Any] -UpdateMask = Optional[Sequence[str]] +UpdateMask = Sequence[str] | None diff --git a/providers/fab/src/airflow/providers/fab/www/auth.py b/providers/fab/src/airflow/providers/fab/www/auth.py index a01ab8c9e1f9b..5cd35e4600dbd 100644 --- a/providers/fab/src/airflow/providers/fab/www/auth.py +++ b/providers/fab/src/airflow/providers/fab/www/auth.py @@ -18,9 +18,9 @@ import functools import logging -from collections.abc import Sequence +from collections.abc import Callable, Sequence from functools import wraps -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from flask import flash, redirect, render_template, request, url_for from flask_appbuilder._compat import as_unicode diff --git a/providers/fab/src/airflow/providers/fab/www/security_manager.py b/providers/fab/src/airflow/providers/fab/www/security_manager.py index 7f3e4ef262035..fc968607de467 100644 --- a/providers/fab/src/airflow/providers/fab/www/security_manager.py +++ b/providers/fab/src/airflow/providers/fab/www/security_manager.py @@ -16,7 +16,7 @@ # under the License. 
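The `api_connexion/types.py` hunk shows the alias rewrite applied across the tree: `Union`/`Optional` spellings become PEP 604 unions, which are valid at runtime for module-level aliases on Python 3.10+. A self-contained sketch of the same aliases, assuming Flask is installed (as it is for the FAB provider):

```python
from collections.abc import Mapping, Sequence
from typing import Any

from flask import Response

# tuple[object, int] covers returns such as (NoContent, 201); Mapping is JSON.
APIResponse = Response | tuple[object, int] | Mapping[str, Any]
UpdateMask = Sequence[str] | None


def patch_example(update_mask: UpdateMask = None) -> APIResponse:
    """Illustrative endpoint body returning a JSON-style mapping."""
    return {"updated_fields": list(update_mask or [])}
```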
from __future__ import annotations -from typing import Callable +from collections.abc import Callable from flask import g from flask_limiter import Limiter diff --git a/providers/fab/tests/unit/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py b/providers/fab/tests/unit/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py index 247bb168fd042..976c9123e25e2 100644 --- a/providers/fab/tests/unit/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py +++ b/providers/fab/tests/unit/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py @@ -20,8 +20,9 @@ from __future__ import annotations import logging +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from flask import Response, request from flask_login import login_user diff --git a/providers/fab/www-hash.txt b/providers/fab/www-hash.txt index 5bd3143ffe080..c0a335802faa4 100644 --- a/providers/fab/www-hash.txt +++ b/providers/fab/www-hash.txt @@ -1 +1 @@ -502f77e55e8918f27d4a37d323a684d717b7e8f2070bf2c09b05dc8cad5b16ee +e628a0561343f95bb6547d755d0cd8908e0d556b299a333c91cb8884bf3ee8ae diff --git a/providers/facebook/pyproject.toml b/providers/facebook/pyproject.toml index 7b413b35c718f..7c012422d74c8 100644 --- a/providers/facebook/pyproject.toml +++ b/providers/facebook/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/ftp/pyproject.toml b/providers/ftp/pyproject.toml index 8b6bd02c17f26..eb56f078d6b50 100644 --- a/providers/ftp/pyproject.toml +++ b/providers/ftp/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/ftp/src/airflow/providers/ftp/hooks/ftp.py b/providers/ftp/src/airflow/providers/ftp/hooks/ftp.py index ede52345a8fa2..3a2593f7d24a4 100644 --- a/providers/ftp/src/airflow/providers/ftp/hooks/ftp.py +++ b/providers/ftp/src/airflow/providers/ftp/hooks/ftp.py @@ -20,7 +20,8 @@ import datetime import ftplib # nosec: B402 import logging -from typing import Any, Callable +from collections.abc import Callable +from typing import Any from airflow.hooks.base import BaseHook diff --git a/providers/git/pyproject.toml b/providers/git/pyproject.toml index 9926b9f237028..35d4fa0b44b4d 100644 --- a/providers/git/pyproject.toml +++ b/providers/git/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/github/pyproject.toml b/providers/github/pyproject.toml index 50d9c0e6529d3..0839391fbe858 100644 --- a/providers/github/pyproject.toml +++ b/providers/github/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/github/src/airflow/providers/github/operators/github.py b/providers/github/src/airflow/providers/github/operators/github.py index 82996d3ecedb4..fb368b9280e32 100644 --- a/providers/github/src/airflow/providers/github/operators/github.py +++ b/providers/github/src/airflow/providers/github/operators/github.py @@ -17,7 +17,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from github import GithubException diff --git a/providers/github/src/airflow/providers/github/sensors/github.py b/providers/github/src/airflow/providers/github/sensors/github.py index 8d7732823095c..b234168494836 100644 --- a/providers/github/src/airflow/providers/github/sensors/github.py +++ b/providers/github/src/airflow/providers/github/sensors/github.py @@ -17,7 +17,8 @@ # under the License. 
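The FTP hook hunk above is typical of the hooks that annotate user-supplied callbacks: `Callable` moves to `collections.abc`, and the callback is the per-chunk function that `ftplib` invokes. An illustrative helper with an assumed name, not the `FTPHook` API:

```python
import ftplib  # nosec: B402 - sketch only
from collections.abc import Callable


def retrieve_file(conn: ftplib.FTP, remote_full_path: str, callback: Callable[[bytes], None]) -> None:
    """Stream a remote file, handing each received chunk to *callback*."""
    conn.retrbinary(f"RETR {remote_full_path}", callback)


# usage sketch: retrieve_file(ftp_conn, "/data/file.bin", local_file.write)
```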
from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from github import GithubException diff --git a/providers/google/pyproject.toml b/providers/google/pyproject.toml index ba0d3a7f0aa67..c19467f4bdc54 100644 --- a/providers/google/pyproject.toml +++ b/providers/google/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py index 0230ab5f57d7c..a0f635c584ab4 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py @@ -29,7 +29,7 @@ from collections.abc import Iterable, Mapping, Sequence from copy import deepcopy from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, NoReturn, Union, cast, overload +from typing import TYPE_CHECKING, Any, Literal, NoReturn, cast, overload from aiohttp import ClientSession as ClientSession from gcloud.aio.bigquery import Job, Table as Table_async @@ -57,7 +57,6 @@ from pandas_gbq import read_gbq from pandas_gbq.gbq import GbqConnector # noqa: F401 used in ``airflow.contrib.hooks.bigquery`` from sqlalchemy import create_engine -from typing_extensions import Literal from airflow.exceptions import ( AirflowException, @@ -89,7 +88,7 @@ log = logging.getLogger(__name__) -BigQueryJob = Union[CopyJob, QueryJob, LoadJob, ExtractJob] +BigQueryJob = CopyJob | QueryJob | LoadJob | ExtractJob class BigQueryHook(GoogleBaseHook, DbApiHook): diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py b/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py index a9b7e07977a7b..018ba9e9b0672 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py @@ -27,9 +27,9 @@ import time import uuid import warnings -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Sequence from copy import deepcopy -from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from google.cloud.dataflow_v1beta3 import ( GetJobRequest, diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py b/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py index 74aea85fd7934..6e32c49043271 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py @@ -26,12 +26,12 @@ import shutil import time import warnings -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Sequence from contextlib import contextmanager from functools import partial from io import BytesIO from tempfile import NamedTemporaryFile -from 
typing import IO, TYPE_CHECKING, Any, Callable, TypeVar, cast, overload +from typing import IO, TYPE_CHECKING, Any, TypeVar, cast, overload from urllib.parse import urlsplit from gcloud.aio.storage import Storage diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/mlengine.py b/providers/google/src/airflow/providers/google/cloud/hooks/mlengine.py index e37bc13c89802..b79d991227d7b 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/mlengine.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/mlengine.py @@ -23,7 +23,8 @@ import logging import random import time -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from aiohttp import ClientSession from gcloud.aio.auth import AioSession, Token diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/spanner.py b/providers/google/src/airflow/providers/google/cloud/hooks/spanner.py index 24bea1e9566d5..93ace3ff19606 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/spanner.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/spanner.py @@ -19,8 +19,8 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Callable, NamedTuple +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, NamedTuple from google.api_core.exceptions import AlreadyExists, GoogleAPICallError from google.cloud.spanner_v1.client import Client diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/ray.py b/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/ray.py index 5aede4a0465e7..64054d1460802 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/ray.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/ray.py @@ -57,7 +57,7 @@ def serialize_cluster_obj(self, cluster_obj: resources.Cluster) -> dict: """Serialize Cluster dataclass to dict.""" def __encode_value(value: Any) -> Any: - if isinstance(value, (list, Repeated)): + if isinstance(value, list | Repeated): return [__encode_value(nested_value) for nested_value in value] if isinstance(value, ScalarMapContainer): return {key: __encode_value(nested_value) for key, nested_value in dict(value).items()} diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/vision.py b/providers/google/src/airflow/providers/google/cloud/hooks/vision.py index a144ffad04add..ecbfa7c83f4c0 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/vision.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/vision.py @@ -19,10 +19,10 @@ from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Callable, Sequence from copy import deepcopy from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault from google.cloud.vision_v1 import ( diff --git a/providers/google/src/airflow/providers/google/cloud/links/base.py b/providers/google/src/airflow/providers/google/cloud/links/base.py index da5d68bfbe98b..b24b780c61318 100644 --- a/providers/google/src/airflow/providers/google/cloud/links/base.py +++ b/providers/google/src/airflow/providers/google/cloud/links/base.py @@ -106,7 +106,7 @@ def get_link( ti_key: TaskInstanceKey, ) -> str: if TYPE_CHECKING: - assert isinstance(operator, 
(GoogleCloudBaseOperator, BaseSensorOperator)) + assert isinstance(operator, GoogleCloudBaseOperator | BaseSensorOperator) conf = self.get_config(operator, ti_key) if not conf: diff --git a/providers/google/src/airflow/providers/google/cloud/operators/pubsub.py b/providers/google/src/airflow/providers/google/cloud/operators/pubsub.py index 4df7586c5e915..c24cacae49368 100644 --- a/providers/google/src/airflow/providers/google/cloud/operators/pubsub.py +++ b/providers/google/src/airflow/providers/google/cloud/operators/pubsub.py @@ -25,8 +25,8 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault from google.cloud.pubsub_v1.types import ( diff --git a/providers/google/src/airflow/providers/google/cloud/sensors/bigquery_dts.py b/providers/google/src/airflow/providers/google/cloud/sensors/bigquery_dts.py index c89763f9b6c49..d7572a05a0fb0 100644 --- a/providers/google/src/airflow/providers/google/cloud/sensors/bigquery_dts.py +++ b/providers/google/src/airflow/providers/google/cloud/sensors/bigquery_dts.py @@ -112,7 +112,7 @@ def __init__( self.location = location def _normalize_state_list(self, states) -> set[TransferState]: - states = {states} if isinstance(states, (str, TransferState, int)) else states + states = {states} if isinstance(states, str | TransferState | int) else states result = set() for state in states: if isinstance(state, str): diff --git a/providers/google/src/airflow/providers/google/cloud/sensors/dataflow.py b/providers/google/src/airflow/providers/google/cloud/sensors/dataflow.py index a82401ef49d48..38d131851e2c9 100644 --- a/providers/google/src/airflow/providers/google/cloud/sensors/dataflow.py +++ b/providers/google/src/airflow/providers/google/cloud/sensors/dataflow.py @@ -19,9 +19,9 @@ from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Callable, Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.configuration import conf from airflow.exceptions import AirflowException diff --git a/providers/google/src/airflow/providers/google/cloud/sensors/gcs.py b/providers/google/src/airflow/providers/google/cloud/sensors/gcs.py index 92f3522db1fca..ccf3829cd50cc 100644 --- a/providers/google/src/airflow/providers/google/cloud/sensors/gcs.py +++ b/providers/google/src/airflow/providers/google/cloud/sensors/gcs.py @@ -21,9 +21,9 @@ import os import textwrap -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from google.cloud.storage.retry import DEFAULT_RETRY diff --git a/providers/google/src/airflow/providers/google/cloud/sensors/pubsub.py b/providers/google/src/airflow/providers/google/cloud/sensors/pubsub.py index c52c5ecb0c39e..c93c7fcc5457d 100644 --- a/providers/google/src/airflow/providers/google/cloud/sensors/pubsub.py +++ b/providers/google/src/airflow/providers/google/cloud/sensors/pubsub.py @@ -19,9 +19,9 @@ from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import timedelta -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, 
Any from google.cloud import pubsub_v1 from google.cloud.pubsub_v1.types import ReceivedMessage diff --git a/providers/google/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/providers/google/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py index 11500011109e2..f283649faac0d 100644 --- a/providers/google/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +++ b/providers/google/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py @@ -259,7 +259,7 @@ def generate_data_dict(self, names: Iterable[str], values: Any) -> dict[str, Any def convert_value(self, value: Any | None) -> Any | None: """Convert value to BQ type.""" - if not value or isinstance(value, (str, int, float, bool, dict)): + if not value or isinstance(value, str | int | float | bool | dict): return value if isinstance(value, bytes): return b64encode(value).decode("ascii") @@ -267,13 +267,13 @@ def convert_value(self, value: Any | None) -> Any | None: if self.encode_uuid: return b64encode(value.bytes).decode("ascii") return str(value) - if isinstance(value, (datetime, Date)): + if isinstance(value, datetime | Date): return str(value) if isinstance(value, Decimal): return float(value) if isinstance(value, Time): return str(value).split(".")[0] - if isinstance(value, (list, SortedSet)): + if isinstance(value, list | SortedSet): return self.convert_array_types(value) if hasattr(value, "_fields"): return self.convert_user_type(value) diff --git a/providers/google/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py b/providers/google/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py index 8f861aaee690a..17fd87e9284c5 100644 --- a/providers/google/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py +++ b/providers/google/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py @@ -114,7 +114,7 @@ def convert_type(cls, value, schema_type, **kwargs): """ if isinstance(value, decimal.Decimal): return float(value) - if isinstance(value, (datetime.date, datetime.time)): + if isinstance(value, datetime.date | datetime.time): return value.isoformat() return value diff --git a/providers/google/src/airflow/providers/google/cloud/utils/field_validator.py b/providers/google/src/airflow/providers/google/cloud/utils/field_validator.py index 4a1f25ef25a6d..41c7859a6b960 100644 --- a/providers/google/src/airflow/providers/google/cloud/utils/field_validator.py +++ b/providers/google/src/airflow/providers/google/cloud/utils/field_validator.py @@ -134,8 +134,7 @@ from __future__ import annotations import re -from collections.abc import Sequence -from typing import Callable +from collections.abc import Callable, Sequence from airflow.exceptions import AirflowException from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/providers/google/src/airflow/providers/google/common/auth_backend/google_openid.py b/providers/google/src/airflow/providers/google/common/auth_backend/google_openid.py index 6a71bc4846397..6567c6f2a0050 100644 --- a/providers/google/src/airflow/providers/google/common/auth_backend/google_openid.py +++ b/providers/google/src/airflow/providers/google/common/auth_backend/google_openid.py @@ -20,8 +20,9 @@ from __future__ import annotations import logging +from collections.abc import Callable from functools import wraps -from typing import Callable, TypeVar, cast +from typing import TypeVar, cast import google import google.auth.transport.requests diff --git a/providers/google/src/airflow/providers/google/common/deprecated.py 
b/providers/google/src/airflow/providers/google/common/deprecated.py index 8da1618d9f716..00ea534a9a0cb 100644 --- a/providers/google/src/airflow/providers/google/common/deprecated.py +++ b/providers/google/src/airflow/providers/google/common/deprecated.py @@ -18,8 +18,9 @@ import inspect import re +from collections.abc import Callable from datetime import date, datetime -from typing import Any, Callable +from typing import Any from deprecated import deprecated as standard_deprecated from deprecated.classic import ClassicAdapter diff --git a/providers/google/src/airflow/providers/google/common/hooks/base_google.py b/providers/google/src/airflow/providers/google/common/hooks/base_google.py index db5d24c8ba625..524acb030c8f3 100644 --- a/providers/google/src/airflow/providers/google/common/hooks/base_google.py +++ b/providers/google/src/airflow/providers/google/common/hooks/base_google.py @@ -26,10 +26,10 @@ import logging import os import tempfile -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Sequence from contextlib import ExitStack, contextmanager from subprocess import check_output -from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast import google.auth import google.oauth2.service_account @@ -93,7 +93,7 @@ def is_soft_quota_exception(exception: Exception): if isinstance(exception, Forbidden): return any(reason in error.details() for reason in INVALID_REASONS for error in exception.errors) - if isinstance(exception, (ResourceExhausted, TooManyRequests)): + if isinstance(exception, ResourceExhausted | TooManyRequests): return any(key in error.details() for key in INVALID_KEYS for error in exception.errors) return False diff --git a/providers/google/src/airflow/providers/google/suite/transfers/sql_to_sheets.py b/providers/google/src/airflow/providers/google/suite/transfers/sql_to_sheets.py index ed9243a0c900c..e791e3adb880c 100644 --- a/providers/google/src/airflow/providers/google/suite/transfers/sql_to_sheets.py +++ b/providers/google/src/airflow/providers/google/suite/transfers/sql_to_sheets.py @@ -88,7 +88,7 @@ def _data_prep(self, data): for row in data: item_list = [] for item in row: - if isinstance(item, (datetime.date, datetime.datetime)): + if isinstance(item, datetime.date | datetime.datetime): item = item.isoformat() elif isinstance(item, int): # To exclude int from the number check. 
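The BigQuery hook hunk earlier in this section drops a `typing_extensions` import: on Python 3.10+ `Literal` lives in `typing`, and the `BigQueryJob` alias becomes a plain PEP 604 union of the job classes. A sketch under the assumption that `google-cloud-bigquery` is installed (the job classes are re-exported from the package root); the helper function is illustrative only:

```python
from typing import Literal

from google.cloud.bigquery import CopyJob, ExtractJob, LoadJob, QueryJob

BigQueryJob = CopyJob | QueryJob | LoadJob | ExtractJob


def pick_export_format(kind: Literal["json", "csv"]) -> str:
    """Map a literal option to a BigQuery export format string (illustrative)."""
    return "NEWLINE_DELIMITED_JSON" if kind == "json" else "CSV"
```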
pass diff --git a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py index b8016a96bf4b1..33e0554f7bc4b 100644 --- a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py +++ b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py @@ -23,8 +23,8 @@ from __future__ import annotations import os +from collections.abc import Callable from datetime import datetime -from typing import Callable from airflow.exceptions import AirflowException from airflow.models.dag import DAG diff --git a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py index 974ca6835c679..b9b110321154f 100644 --- a/providers/google/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +++ b/providers/google/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py @@ -21,8 +21,8 @@ from __future__ import annotations import os +from collections.abc import Callable from datetime import datetime -from typing import Callable from airflow.exceptions import AirflowException from airflow.models.dag import DAG diff --git a/providers/grpc/pyproject.toml b/providers/grpc/pyproject.toml index f56c15dfcdf97..f083c288b0918 100644 --- a/providers/grpc/pyproject.toml +++ b/providers/grpc/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/grpc/src/airflow/providers/grpc/hooks/grpc.py b/providers/grpc/src/airflow/providers/grpc/hooks/grpc.py index 2027f40b5834f..8171e49d36f4a 100644 --- a/providers/grpc/src/airflow/providers/grpc/hooks/grpc.py +++ b/providers/grpc/src/airflow/providers/grpc/hooks/grpc.py @@ -18,8 +18,8 @@ from __future__ import annotations -from collections.abc import Generator -from typing import Any, Callable +from collections.abc import Callable, Generator +from typing import Any import grpc from google import auth as google_auth diff --git a/providers/grpc/src/airflow/providers/grpc/operators/grpc.py b/providers/grpc/src/airflow/providers/grpc/operators/grpc.py index 020852ccc1730..b877a4ddbf366 100644 --- a/providers/grpc/src/airflow/providers/grpc/operators/grpc.py +++ b/providers/grpc/src/airflow/providers/grpc/operators/grpc.py @@ -17,8 +17,8 @@ # under the License. 
from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.models import BaseOperator from airflow.providers.grpc.hooks.grpc import GrpcHook diff --git a/providers/hashicorp/pyproject.toml b/providers/hashicorp/pyproject.toml index 238a07391acfb..c30e04e3dec3d 100644 --- a/providers/hashicorp/pyproject.toml +++ b/providers/hashicorp/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/http/pyproject.toml b/providers/http/pyproject.toml index 1d9f8db806bce..58e0a14282756 100644 --- a/providers/http/pyproject.toml +++ b/providers/http/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/http/src/airflow/providers/http/hooks/http.py b/providers/http/src/airflow/providers/http/hooks/http.py index 99281fbe0e928..7635ab67e29b6 100644 --- a/providers/http/src/airflow/providers/http/hooks/http.py +++ b/providers/http/src/airflow/providers/http/hooks/http.py @@ -18,7 +18,8 @@ from __future__ import annotations import copy -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse import aiohttp diff --git a/providers/http/src/airflow/providers/http/operators/http.py b/providers/http/src/airflow/providers/http/operators/http.py index 2c7f260f39e12..ca36ac471c7fd 100644 --- a/providers/http/src/airflow/providers/http/operators/http.py +++ b/providers/http/src/airflow/providers/http/operators/http.py @@ -19,8 +19,8 @@ import base64 import pickle -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from aiohttp import BasicAuth from requests import Response diff --git a/providers/http/src/airflow/providers/http/sensors/http.py b/providers/http/src/airflow/providers/http/sensors/http.py index 55d306d9be213..a9d9274afd109 100644 --- a/providers/http/src/airflow/providers/http/sensors/http.py +++ b/providers/http/src/airflow/providers/http/sensors/http.py @@ -17,9 +17,9 @@ # under the License. 
from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import timedelta -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.configuration import conf from airflow.exceptions import AirflowException diff --git a/providers/http/src/airflow/providers/http/triggers/http.py b/providers/http/src/airflow/providers/http/triggers/http.py index 6c1d13b136334..3708e5e13ed69 100644 --- a/providers/http/src/airflow/providers/http/triggers/http.py +++ b/providers/http/src/airflow/providers/http/triggers/http.py @@ -142,7 +142,7 @@ async def _convert_response(client_response: ClientResponse) -> requests.Respons response.reason = str(client_response.reason) cookies = RequestsCookieJar() for k, v in client_response.cookies.items(): - cookies.set(k, v) + cookies.set(k, str(v)) # Convert Morsel to string response.cookies = cookies return response diff --git a/providers/imap/pyproject.toml b/providers/imap/pyproject.toml index 67593b84b724a..9084ee701fecc 100644 --- a/providers/imap/pyproject.toml +++ b/providers/imap/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/influxdb/pyproject.toml b/providers/influxdb/pyproject.toml index e5a78d219b59a..3d42f806d1842 100644 --- a/providers/influxdb/pyproject.toml +++ b/providers/influxdb/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/jdbc/pyproject.toml b/providers/jdbc/pyproject.toml index ae4ce6c8be386..81703dcb8114d 100644 --- a/providers/jdbc/pyproject.toml +++ b/providers/jdbc/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/jenkins/pyproject.toml b/providers/jenkins/pyproject.toml index 390c607067ec0..6d9121af4558f 100644 --- a/providers/jenkins/pyproject.toml +++ b/providers/jenkins/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py index a4e5d6eeec3c2..0b7d83372d433 100644 --- a/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py +++ b/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py @@ -19,11 +19,10 @@ import ast import json -import socket import time from collections.abc import Iterable, Mapping, Sequence from functools import cached_property -from typing import Any, Union +from typing import Any from urllib.error import HTTPError, URLError import jenkins @@ -35,7 +34,7 @@ from airflow.providers.jenkins.hooks.jenkins import JenkinsHook JenkinsRequest = Mapping[str, Any] -ParamType = Union[str, dict, list, None] +ParamType = str | dict | list | None def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> JenkinsRequest: @@ -67,7 +66,7 @@ def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> Jenki if e.code == 404: raise jenkins.NotFoundException("Requested item could not be found") raise - except socket.timeout as e: + except TimeoutError as e: raise jenkins.TimeoutException(f"Error in request: {e}") except URLError as e: raise JenkinsException(f"Error in request: {e.reason}") diff --git a/providers/keycloak/pyproject.toml b/providers/keycloak/pyproject.toml index 29365f2966eec..8a0fda9d74fe3 100644 --- a/providers/keycloak/pyproject.toml +++ b/providers/keycloak/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
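The Jenkins hunk above removes the `socket` import: since Python 3.10, `socket.timeout` is a deprecated alias of the builtin `TimeoutError`, so catching the builtin covers both spellings. A small sketch of the same exception handling around a `urllib` request; the URL handling is illustrative, not the operator's code:

```python
import socket
from urllib.request import Request, urlopen

assert socket.timeout is TimeoutError  # true on Python 3.10 and newer


def fetch(url: str, timeout: float = 5.0) -> bytes:
    """Fetch *url*, converting timeouts into a descriptive error."""
    try:
        with urlopen(Request(url), timeout=timeout) as response:  # nosec: sketch only
            return response.read()
    except TimeoutError as e:  # also catches what used to be socket.timeout
        raise RuntimeError(f"Error in request: {e}") from e
```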
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/microsoft/azure/pyproject.toml b/providers/microsoft/azure/pyproject.toml index 34e07d0198957..68e64242e8dee 100644 --- a/providers/microsoft/azure/pyproject.toml +++ b/providers/microsoft/azure/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/asb.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/asb.py index c98c99f928eac..ae51108eb43de 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/asb.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/asb.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from uuid import UUID, uuid4 from azure.core.exceptions import ResourceNotFoundError diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/cosmos.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/cosmos.py index da11ce10e0a16..f654c0b386190 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/cosmos.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/cosmos.py @@ -27,7 +27,7 @@ from __future__ import annotations import uuid -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from azure.cosmos import PartitionKey @@ -44,7 +44,7 @@ ) if TYPE_CHECKING: - PartitionKeyType = Union[str, list[str]] + PartitionKeyType = str | list[str] class AzureCosmosDBHook(BaseHook): diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py index d2e2a6207b16c..8f90fc3769d52 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py @@ -35,8 +35,9 @@ import inspect import time +from collections.abc import Callable from functools import wraps -from typing import IO, TYPE_CHECKING, Any, Callable, TypeVar, Union, cast +from typing import IO, TYPE_CHECKING, Any, TypeVar, cast from asgiref.sync import sync_to_async from azure.identity import ClientSecretCredential, DefaultAzureCredential @@ -68,8 +69,8 @@ TriggerResource, ) -Credentials = Union[ClientSecretCredential, DefaultAzureCredential] -AsyncCredentials = Union[AsyncClientSecretCredential, AsyncDefaultAzureCredential] +Credentials = ClientSecretCredential | DefaultAzureCredential +AsyncCredentials = AsyncClientSecretCredential | AsyncDefaultAzureCredential T = TypeVar("T", bound=Any) diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py 
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py index f18ba472015d1..c01dadc03b3cd 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py @@ -18,7 +18,7 @@ from __future__ import annotations from functools import cached_property -from typing import Any, Union +from typing import Any from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError from azure.datalake.store import core, lib, multithread @@ -41,7 +41,7 @@ get_sync_default_azure_credential, ) -Credentials = Union[ClientSecretCredential, AzureIdentityCredentialAdapter, DefaultAzureCredential] +Credentials = ClientSecretCredential | AzureIdentityCredentialAdapter | DefaultAzureCredential class AzureDataLakeHook(BaseHook): diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py index fe00439a0b916..9c184b378a558 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py @@ -17,7 +17,7 @@ from __future__ import annotations import time -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any from azure.core.exceptions import ServiceRequestError from azure.identity import ClientSecretCredential, DefaultAzureCredential @@ -36,7 +36,7 @@ from azure.synapse.artifacts.models import CreateRunResponse, PipelineRun from azure.synapse.spark.models import SparkBatchJobOptions -Credentials = Union[ClientSecretCredential, DefaultAzureCredential] +Credentials = ClientSecretCredential | DefaultAzureCredential class AzureSynapseSparkBatchRunStatus: diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/wasb.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/wasb.py index 7ff704e0f5e45..05cdc7ef283ce 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/wasb.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/wasb.py @@ -29,7 +29,7 @@ import logging import os from functools import cached_property -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any from asgiref.sync import sync_to_async from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError @@ -57,7 +57,7 @@ if TYPE_CHECKING: from azure.storage.blob._models import BlobProperties -AsyncCredentials = Union[AsyncClientSecretCredential, AsyncDefaultAzureCredential] +AsyncCredentials = AsyncClientSecretCredential | AsyncDefaultAzureCredential class WasbHook(BaseHook): diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/asb.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/asb.py index dfd0147c9c889..d74006df4dc8e 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/asb.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/asb.py @@ -16,8 +16,8 @@ # under the License. 
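The Azure hook hunks above (`data_factory`, `data_lake`, `synapse`, `wasb`) all rewrite their credential aliases as PEP 604 unions of the `azure-identity` classes. A hedged sketch of how such an alias is typically used, assuming `azure-identity` is installed; the selection logic is illustrative, not the hooks' connection handling:

```python
from azure.identity import ClientSecretCredential, DefaultAzureCredential

Credentials = ClientSecretCredential | DefaultAzureCredential


def get_credential(
    tenant_id: str | None = None,
    client_id: str | None = None,
    client_secret: str | None = None,
) -> Credentials:
    """Prefer an explicit service principal; otherwise fall back to the default chain."""
    if tenant_id and client_id and client_secret:
        return ClientSecretCredential(
            tenant_id=tenant_id, client_id=client_id, client_secret=client_secret
        )
    return DefaultAzureCredential()
```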
from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from uuid import UUID from airflow.models import BaseOperator diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/msgraph.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/msgraph.py index 31d342ccc143a..4f4b3b6dc00cb 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/msgraph.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/msgraph.py @@ -18,13 +18,12 @@ from __future__ import annotations import warnings -from collections.abc import Sequence +from collections.abc import Callable, Sequence from contextlib import suppress from copy import deepcopy from typing import ( TYPE_CHECKING, Any, - Callable, ) from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, TaskDeferred diff --git a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/sensors/msgraph.py b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/sensors/msgraph.py index 245bd881e1ea0..d59cb13a46f89 100644 --- a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/sensors/msgraph.py +++ b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/sensors/msgraph.py @@ -17,8 +17,8 @@ # under the License. from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from airflow.exceptions import AirflowException from airflow.providers.common.compat.standard.triggers import TimeDeltaTrigger diff --git a/providers/microsoft/mssql/pyproject.toml b/providers/microsoft/mssql/pyproject.toml index acaaad5ce1c92..e9b40d78144f3 100644 --- a/providers/microsoft/mssql/pyproject.toml +++ b/providers/microsoft/mssql/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/microsoft/psrp/pyproject.toml b/providers/microsoft/psrp/pyproject.toml index a443d51e18e9e..1a404ca3f0b5a 100644 --- a/providers/microsoft/psrp/pyproject.toml +++ b/providers/microsoft/psrp/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/microsoft/psrp/src/airflow/providers/microsoft/psrp/hooks/psrp.py b/providers/microsoft/psrp/src/airflow/providers/microsoft/psrp/hooks/psrp.py index cc5e2da178630..877c3d237f547 100644 --- a/providers/microsoft/psrp/src/airflow/providers/microsoft/psrp/hooks/psrp.py +++ b/providers/microsoft/psrp/src/airflow/providers/microsoft/psrp/hooks/psrp.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations -from collections.abc import Generator +from collections.abc import Callable, Generator from contextlib import contextmanager from copy import copy from logging import DEBUG, ERROR, INFO, WARNING -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from weakref import WeakKeyDictionary from pypsrp.host import PSHost diff --git a/providers/microsoft/winrm/pyproject.toml b/providers/microsoft/winrm/pyproject.toml index 9b9dbd5d97b1f..356d6cefd3afa 100644 --- a/providers/microsoft/winrm/pyproject.toml +++ b/providers/microsoft/winrm/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/mongo/pyproject.toml b/providers/mongo/pyproject.toml index 8dbb08723e547..af17cb618ed88 100644 --- a/providers/mongo/pyproject.toml +++ b/providers/mongo/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/mongo/src/airflow/providers/mongo/hooks/mongo.py b/providers/mongo/src/airflow/providers/mongo/hooks/mongo.py index 20dbf9f0dda90..c38c704a72975 100644 --- a/providers/mongo/src/airflow/providers/mongo/hooks/mongo.py +++ b/providers/mongo/src/airflow/providers/mongo/hooks/mongo.py @@ -32,10 +32,10 @@ if TYPE_CHECKING: from types import TracebackType + from typing import Literal from pymongo.collection import Collection as MongoCollection from pymongo.command_cursor import CommandCursor - from typing_extensions import Literal from airflow.models import Connection diff --git a/providers/mysql/pyproject.toml b/providers/mysql/pyproject.toml index cc900d6d332dc..aee7f18cfd10d 100644 --- a/providers/mysql/pyproject.toml +++ b/providers/mysql/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/neo4j/pyproject.toml b/providers/neo4j/pyproject.toml index 808753ed6180a..761b15873eecb 100644 --- a/providers/neo4j/pyproject.toml +++ b/providers/neo4j/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/odbc/pyproject.toml b/providers/odbc/pyproject.toml index c975c4a7785cd..f670af470c0db 100644 --- a/providers/odbc/pyproject.toml +++ b/providers/odbc/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
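The Mongo hook hunk above moves `Literal` into the `TYPE_CHECKING` block and sources it from `typing`; together with the Cosmos hook's `PartitionKeyType` rewrite earlier in this section it shows the type-checking-only pattern: names behind `TYPE_CHECKING` are never imported or evaluated at runtime, so the 3.10+ spellings cost nothing even in hot paths. A sketch with stand-in names (the alias and function below are illustrative, not provider APIs):

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Literal

    # type-checking-only alias, mirroring the Cosmos hook's PartitionKeyType
    PartitionKeyType = str | list[str]


def find(projection: PartitionKeyType | None = None, mode: Literal["one", "many"] = "many") -> None:
    """Annotations stay unevaluated at runtime thanks to postponed evaluation."""
    print(mode, projection)


find(mode="one")  # prints: one None
```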
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/openai/pyproject.toml b/providers/openai/pyproject.toml index ef4aa4e7372bb..7422c6aa3a6b3 100644 --- a/providers/openai/pyproject.toml +++ b/providers/openai/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/openai/src/airflow/providers/openai/operators/openai.py b/providers/openai/src/airflow/providers/openai/operators/openai.py index 878c5f8870eed..97356ad2f55c2 100644 --- a/providers/openai/src/airflow/providers/openai/operators/openai.py +++ b/providers/openai/src/airflow/providers/openai/operators/openai.py @@ -75,7 +75,7 @@ def hook(self) -> OpenAIHook: return OpenAIHook(conn_id=self.conn_id) def execute(self, context: Context) -> list[float]: - if not self.input_text or not isinstance(self.input_text, (str, list)): + if not self.input_text or not isinstance(self.input_text, str | list): raise ValueError( "The 'input_text' must be a non-empty string, list of strings, list of integers, or list of lists of integers." ) diff --git a/providers/openfaas/pyproject.toml b/providers/openfaas/pyproject.toml index c3f215f2ef974..293ce8bb4b951 100644 --- a/providers/openfaas/pyproject.toml +++ b/providers/openfaas/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/openlineage/pyproject.toml b/providers/openlineage/pyproject.toml index dfecc87246762..fa006f17fe469 100644 --- a/providers/openlineage/pyproject.toml +++ b/providers/openlineage/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
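The OpenAI operator hunk above applies the same `isinstance()` rewrite to input validation. The standalone function below mirrors that check outside the operator, purely for illustration:

```python
def validate_input_text(input_text):
    """Reject empty input and anything that is not a string or a list."""
    if not input_text or not isinstance(input_text, str | list):
        raise ValueError(
            "The 'input_text' must be a non-empty string, list of strings, "
            "list of integers, or list of lists of integers."
        )
    return input_text


validate_input_text("embed me")  # passes
try:
    validate_input_text(42)
except ValueError as err:
    print(err)
```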
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/openlineage/src/airflow/providers/openlineage/extractors/base.py b/providers/openlineage/src/airflow/providers/openlineage/extractors/base.py index 05eed3960ca3e..61e17ac099846 100644 --- a/providers/openlineage/src/airflow/providers/openlineage/extractors/base.py +++ b/providers/openlineage/src/airflow/providers/openlineage/extractors/base.py @@ -19,7 +19,7 @@ import warnings from abc import ABC, abstractmethod -from typing import Generic, TypeVar, Union +from typing import Generic, TypeVar from attrs import Factory, define from openlineage.client.event_v2 import Dataset as OLDataset @@ -33,7 +33,7 @@ # this is not to break static checks compatibility with v1 OpenLineage facet classes DatasetSubclass = TypeVar("DatasetSubclass", bound=OLDataset) -BaseFacetSubclass = TypeVar("BaseFacetSubclass", bound=Union[BaseFacet_V1, RunFacet, JobFacet]) +BaseFacetSubclass = TypeVar("BaseFacetSubclass", bound=BaseFacet_V1 | RunFacet | JobFacet) OL_METHOD_NAME_START = "get_openlineage_facets_on_start" OL_METHOD_NAME_COMPLETE = "get_openlineage_facets_on_complete" diff --git a/providers/openlineage/src/airflow/providers/openlineage/extractors/python.py b/providers/openlineage/src/airflow/providers/openlineage/extractors/python.py index c716e28b4d8f0..37054d25d2942 100644 --- a/providers/openlineage/src/airflow/providers/openlineage/extractors/python.py +++ b/providers/openlineage/src/airflow/providers/openlineage/extractors/python.py @@ -18,7 +18,7 @@ from __future__ import annotations import inspect -from typing import Callable +from collections.abc import Callable from openlineage.client.facet_v2 import source_code_job diff --git a/providers/openlineage/src/airflow/providers/openlineage/sqlparser.py b/providers/openlineage/src/airflow/providers/openlineage/sqlparser.py index d2d37b317355f..9cb4ef6f319b8 100644 --- a/providers/openlineage/src/airflow/providers/openlineage/sqlparser.py +++ b/providers/openlineage/src/airflow/providers/openlineage/sqlparser.py @@ -17,7 +17,8 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Callable, TypedDict +from collections.abc import Callable +from typing import TYPE_CHECKING, TypedDict import sqlparse from attrs import define diff --git a/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py b/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py index 1223a3618bd7b..2143dd2f19fd7 100644 --- a/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py +++ b/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py @@ -20,7 +20,7 @@ from collections import defaultdict from contextlib import closing from enum import IntEnum -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from attrs import define from openlineage.client.event_v2 import Dataset @@ -50,7 +50,7 @@ class ColumnIndex(IntEnum): DATABASE = 5 -TablesHierarchy = dict[Optional[str], dict[Optional[str], list[str]]] +TablesHierarchy = dict[str | None, dict[str | None, list[str]]] @define diff --git a/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py b/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py index 3d810822f6d41..9134cdb191a61 100644 --- a/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py +++ b/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py @@ -20,10 +20,11 @@ import datetime import json import logging +from 
collections.abc import Callable from contextlib import suppress from functools import wraps from importlib import metadata -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any import attrs from openlineage.client.facet_v2 import parent_run @@ -205,7 +206,7 @@ def get_user_provided_run_facets(ti: TaskInstance, ti_state: TaskInstanceState) def get_fully_qualified_class_name(operator: BaseOperator | MappedOperator) -> str: - if isinstance(operator, (MappedOperator, SerializedBaseOperator)): + if isinstance(operator, MappedOperator | SerializedBaseOperator): # as in airflow.api_connexion.schemas.common_schema.ClassReferenceSchema return operator._task_module + "." + operator._task_type # type: ignore op_class = get_operator_class(operator) @@ -222,7 +223,7 @@ def is_selective_lineage_enabled(obj: DAG | BaseOperator | MappedOperator) -> bo return True if isinstance(obj, DAG): return is_dag_lineage_enabled(obj) - if isinstance(obj, (BaseOperator, MappedOperator)): + if isinstance(obj, BaseOperator | MappedOperator): return is_task_lineage_enabled(obj) raise TypeError("is_selective_lineage_enabled can only be used on DAG or Operator objects") @@ -300,7 +301,7 @@ def _cast_basic_types(value): return value.isoformat() if isinstance(value, datetime.timedelta): return f"{value.total_seconds()} seconds" - if isinstance(value, (set, list, tuple)): + if isinstance(value, set | list | tuple): return str(list(value)) return value @@ -375,7 +376,7 @@ def serialize_timetable(cls, dag: DAG) -> dict[str, Any]: return serialized def _serialize_ds(ds: BaseDatasetEventInput) -> dict[str, Any]: - if isinstance(ds, (DatasetAny, DatasetAll)): + if isinstance(ds, DatasetAny | DatasetAll): return { "__type": "dataset_all" if isinstance(ds, DatasetAll) else "dataset_any", "objects": [_serialize_ds(child) for child in ds.objects], diff --git a/providers/openlineage/tests/unit/openlineage/plugins/test_listener.py b/providers/openlineage/tests/unit/openlineage/plugins/test_listener.py index 0b686727a9a59..f268cdbe3cfb7 100644 --- a/providers/openlineage/tests/unit/openlineage/plugins/test_listener.py +++ b/providers/openlineage/tests/unit/openlineage/plugins/test_listener.py @@ -18,9 +18,10 @@ import uuid from collections import defaultdict +from collections.abc import Callable from concurrent.futures import Future from contextlib import suppress -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING from unittest import mock from unittest.mock import MagicMock, patch diff --git a/providers/opensearch/pyproject.toml b/providers/opensearch/pyproject.toml index fdb1cbf961f80..32427508b0f69 100644 --- a/providers/opensearch/pyproject.toml +++ b/providers/opensearch/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
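For reference, the isinstance() rewrites in hunks like the openlineage utils.py one above lean on PEP 604: since Python 3.10 the second argument to isinstance() may be a union object such as set | list | tuple instead of a tuple of types. A minimal standalone sketch of the equivalence (illustrative only, not part of the patch):

    import datetime

    # Python 3.10+: X | Y evaluates to a types.UnionType, which isinstance()
    # accepts exactly like the older tuple-of-types form.
    value = datetime.timedelta(seconds=90)

    assert not isinstance(value, set | list | tuple)
    assert isinstance(value, datetime.timedelta | datetime.datetime)
    # Both spellings remain valid; the union form simply requires the 3.10
    # runtime floor that this change introduces.
    assert isinstance("x", str | bytes) == isinstance("x", (str, bytes))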
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/opensearch/src/airflow/providers/opensearch/log/os_response.py b/providers/opensearch/src/airflow/providers/opensearch/log/os_response.py index 2827cb0f04547..4479e21b9ce45 100644 --- a/providers/opensearch/src/airflow/providers/opensearch/log/os_response.py +++ b/providers/opensearch/src/airflow/providers/opensearch/log/os_response.py @@ -130,7 +130,7 @@ def __iter__(self) -> Iterator[Hit]: def __getitem__(self, key): """Retrieve a specific hit or a slice of hits from the Elasticsearch response.""" - if isinstance(key, (slice, int)): + if isinstance(key, slice | int): return self.hits[key] return super().__getitem__(key) diff --git a/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py b/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py index bdb8659c858fd..5a6b60aae24d6 100644 --- a/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py +++ b/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py @@ -22,9 +22,10 @@ import sys import time from collections import defaultdict +from collections.abc import Callable from datetime import datetime from operator import attrgetter -from typing import TYPE_CHECKING, Any, Callable, Literal +from typing import TYPE_CHECKING, Any, Literal import pendulum from opensearchpy import OpenSearch @@ -47,11 +48,9 @@ if AIRFLOW_V_3_0_PLUS: - from typing import Union - from airflow.utils.log.file_task_handler import StructuredLogMessage - OsLogMsgType = Union[list[StructuredLogMessage], str] + OsLogMsgType = list[StructuredLogMessage] | str else: OsLogMsgType = list[tuple[str, str]] # type: ignore[misc] diff --git a/providers/opsgenie/pyproject.toml b/providers/opsgenie/pyproject.toml index 33ad6faa4577d..ecdee6d22c884 100644 --- a/providers/opsgenie/pyproject.toml +++ b/providers/opsgenie/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/oracle/pyproject.toml b/providers/oracle/pyproject.toml index 2e63d60e0aa58..c54ab47567b02 100644 --- a/providers/oracle/pyproject.toml +++ b/providers/oracle/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/oracle/src/airflow/providers/oracle/hooks/oracle.py b/providers/oracle/src/airflow/providers/oracle/hooks/oracle.py index 39b32fd119494..355a7b0d8f978 100644 --- a/providers/oracle/src/airflow/providers/oracle/hooks/oracle.py +++ b/providers/oracle/src/airflow/providers/oracle/hooks/oracle.py @@ -156,14 +156,14 @@ def get_conn(self) -> oracledb.Connection: if thick_mode is True: if self.thick_mode_lib_dir is None: self.thick_mode_lib_dir = conn.extra_dejson.get("thick_mode_lib_dir") - if not isinstance(self.thick_mode_lib_dir, (str, type(None))): + if not isinstance(self.thick_mode_lib_dir, str | type(None)): raise TypeError( f"thick_mode_lib_dir expected str or None, " f"got {type(self.thick_mode_lib_dir).__name__}" ) if self.thick_mode_config_dir is None: self.thick_mode_config_dir = conn.extra_dejson.get("thick_mode_config_dir") - if not isinstance(self.thick_mode_config_dir, (str, type(None))): + if not isinstance(self.thick_mode_config_dir, str | type(None)): raise TypeError( f"thick_mode_config_dir expected str or None, " f"got {type(self.thick_mode_config_dir).__name__}" diff --git a/providers/pagerduty/pyproject.toml b/providers/pagerduty/pyproject.toml index d708a60a75afc..11d892e73c636 100644 --- a/providers/pagerduty/pyproject.toml +++ b/providers/pagerduty/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/papermill/pyproject.toml b/providers/papermill/pyproject.toml index 48e029356b745..6e8ac0550c057 100644 --- a/providers/papermill/pyproject.toml +++ b/providers/papermill/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/pgvector/pyproject.toml b/providers/pgvector/pyproject.toml index 15c434e33483c..ac825d11f1987 100644 --- a/providers/pgvector/pyproject.toml +++ b/providers/pgvector/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/pinecone/pyproject.toml b/providers/pinecone/pyproject.toml index 6053921a664c7..136e2a8c0c1ac 100644 --- a/providers/pinecone/pyproject.toml +++ b/providers/pinecone/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/postgres/pyproject.toml b/providers/postgres/pyproject.toml index 8c4daefbc1401..9314d9cc2d38f 100644 --- a/providers/postgres/pyproject.toml +++ b/providers/postgres/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py b/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py index b201a155bd29c..3c506656c3d6d 100644 --- a/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py +++ b/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py @@ -20,7 +20,7 @@ import os from contextlib import closing from copy import deepcopy -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any, TypeAlias import psycopg2 import psycopg2.extensions @@ -39,7 +39,7 @@ from airflow.providers.common.sql.dialects.dialect import Dialect from airflow.providers.openlineage.sqlparser import DatabaseInfo -CursorType = Union[DictCursor, RealDictCursor, NamedTupleCursor] +CursorType: TypeAlias = DictCursor | RealDictCursor | NamedTupleCursor class PostgresHook(DbApiHook): @@ -228,7 +228,7 @@ def _serialize_cell(cell: object, conn: connection | None = None) -> Any: :param conn: The database connection :return: The cell """ - if isinstance(cell, (dict, list)): + if isinstance(cell, dict | list): cell = Json(cell) return cell diff --git a/providers/presto/pyproject.toml b/providers/presto/pyproject.toml index e3c992e92195e..34bc2f1a9aba0 100644 --- a/providers/presto/pyproject.toml +++ b/providers/presto/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
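The PostgresHook hunk above also introduces typing.TypeAlias (new in Python 3.10) for the CursorType alias: the | operator builds the union object at runtime, while TypeAlias marks the assignment as a type alias for static checkers. A short sketch of the same pattern with a hypothetical alias and helper (not from the patch):

    from __future__ import annotations

    from collections.abc import Iterable
    from typing import TypeAlias

    # Hypothetical alias mirroring the CursorType pattern: no typing.Union
    # needed once 3.10 is the minimum supported version.
    HostsType: TypeAlias = str | Iterable[str]


    def as_list(hosts: HostsType) -> list[str]:
        # Hypothetical helper, only here to show the alias in a signature.
        return [hosts] if isinstance(hosts, str) else list(hosts)


    assert as_list("db-1") == ["db-1"]
    assert as_list(("db-1", "db-2")) == ["db-1", "db-2"]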
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/qdrant/pyproject.toml b/providers/qdrant/pyproject.toml index 6d473726d7fbc..cd967268e840b 100644 --- a/providers/qdrant/pyproject.toml +++ b/providers/qdrant/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/redis/pyproject.toml b/providers/redis/pyproject.toml index 4acc215113c5c..6dacd74327a18 100644 --- a/providers/redis/pyproject.toml +++ b/providers/redis/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/salesforce/pyproject.toml b/providers/salesforce/pyproject.toml index c844229839fc0..26134bd4c04d4 100644 --- a/providers/salesforce/pyproject.toml +++ b/providers/salesforce/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py b/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py index 4baff384efc10..fe33f5ab8f4ef 100644 --- a/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py +++ b/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py @@ -23,8 +23,9 @@ from airflow.providers.salesforce.hooks.salesforce import SalesforceHook if TYPE_CHECKING: + from typing import Literal + from simple_salesforce.bulk import SFBulkHandler - from typing_extensions import Literal try: from airflow.sdk.definitions.context import Context diff --git a/providers/samba/pyproject.toml b/providers/samba/pyproject.toml index 44f1f9a4b5ebf..572e7ad34b70b 100644 --- a/providers/samba/pyproject.toml +++ b/providers/samba/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/segment/pyproject.toml b/providers/segment/pyproject.toml index c6a926069f887..091fd96c6a638 100644 --- a/providers/segment/pyproject.toml +++ b/providers/segment/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/sendgrid/pyproject.toml b/providers/sendgrid/pyproject.toml index d1040c1897cbc..a4a3d748cf033 100644 --- a/providers/sendgrid/pyproject.toml +++ b/providers/sendgrid/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
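The salesforce bulk.py hunk above swaps typing_extensions.Literal for typing.Literal; Literal has been in the standard library since Python 3.8, so the backport import is no longer needed once older interpreters are out of scope. A tiny sketch with a hypothetical function (illustrative only):

    from typing import Literal


    def build(package_format: Literal["wheel", "sdist"]) -> str:
        # Hypothetical function; Literal restricts accepted values at
        # type-check time, runtime behaviour is unchanged.
        return f"building a {package_format}"


    assert build("wheel") == "building a wheel"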
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/sendgrid/src/airflow/providers/sendgrid/utils/emailer.py b/providers/sendgrid/src/airflow/providers/sendgrid/utils/emailer.py index f22a080deba78..6ab97e577aede 100644 --- a/providers/sendgrid/src/airflow/providers/sendgrid/utils/emailer.py +++ b/providers/sendgrid/src/airflow/providers/sendgrid/utils/emailer.py @@ -24,7 +24,6 @@ import mimetypes import os from collections.abc import Iterable -from typing import Union import sendgrid from sendgrid.helpers.mail import ( @@ -44,7 +43,7 @@ log = logging.getLogger(__name__) -AddressesType = Union[str, Iterable[str]] +AddressesType = str | Iterable[str] def send_email( diff --git a/providers/sftp/pyproject.toml b/providers/sftp/pyproject.toml index c0bdea87c741b..841b78d1b1fed 100644 --- a/providers/sftp/pyproject.toml +++ b/providers/sftp/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/sftp/src/airflow/providers/sftp/decorators/sensors/sftp.py b/providers/sftp/src/airflow/providers/sftp/decorators/sensors/sftp.py index 1d345ae8d0311..a55b2c67c20a0 100644 --- a/providers/sftp/src/airflow/providers/sftp/decorators/sensors/sftp.py +++ b/providers/sftp/src/airflow/providers/sftp/decorators/sensors/sftp.py @@ -17,8 +17,7 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import Callable +from collections.abc import Callable, Sequence from airflow.providers.sftp.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/sftp/src/airflow/providers/sftp/hooks/sftp.py b/providers/sftp/src/airflow/providers/sftp/hooks/sftp.py index e13fd80fffcb1..f5e3c798b1b01 100644 --- a/providers/sftp/src/airflow/providers/sftp/hooks/sftp.py +++ b/providers/sftp/src/airflow/providers/sftp/hooks/sftp.py @@ -24,12 +24,12 @@ import os import stat import warnings -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Sequence from contextlib import contextmanager from fnmatch import fnmatch from io import BytesIO from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any import asyncssh from asgiref.sync import sync_to_async diff --git a/providers/sftp/src/airflow/providers/sftp/sensors/sftp.py b/providers/sftp/src/airflow/providers/sftp/sensors/sftp.py index 4d80d9491efdc..dc7758e47f5a2 100644 --- a/providers/sftp/src/airflow/providers/sftp/sensors/sftp.py +++ b/providers/sftp/src/airflow/providers/sftp/sensors/sftp.py @@ -20,9 +20,9 @@ from __future__ import annotations import os -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from paramiko.sftp import SFTP_NO_SUCH_FILE diff --git a/providers/singularity/pyproject.toml b/providers/singularity/pyproject.toml index 932881ede0359..f64a24941505d 100644 --- 
a/providers/singularity/pyproject.toml +++ b/providers/singularity/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/slack/pyproject.toml b/providers/slack/pyproject.toml index 6b0123cd30fe2..24677eb44aca7 100644 --- a/providers/slack/pyproject.toml +++ b/providers/slack/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py b/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py index 9eb376227cd02..f9a66c43d6020 100644 --- a/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py +++ b/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py @@ -19,8 +19,9 @@ import json import warnings +from collections.abc import Callable from functools import cached_property, wraps -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from slack_sdk import WebhookClient diff --git a/providers/slack/src/airflow/providers/slack/operators/slack.py b/providers/slack/src/airflow/providers/slack/operators/slack.py index e4035d057a19e..aed353ea5a021 100644 --- a/providers/slack/src/airflow/providers/slack/operators/slack.py +++ b/providers/slack/src/airflow/providers/slack/operators/slack.py @@ -20,9 +20,7 @@ import json from collections.abc import Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Any, Literal from airflow.models import BaseOperator from airflow.providers.slack.hooks.slack import SlackHook diff --git a/providers/slack/src/airflow/providers/slack/transfers/sql_to_slack.py b/providers/slack/src/airflow/providers/slack/transfers/sql_to_slack.py index 6cf42418640fc..dfe0f07e6146d 100644 --- a/providers/slack/src/airflow/providers/slack/transfers/sql_to_slack.py +++ b/providers/slack/src/airflow/providers/slack/transfers/sql_to_slack.py @@ -19,9 +19,7 @@ from collections.abc import Mapping, Sequence from functools import cached_property from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING, Any - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Any, Literal from airflow.exceptions import AirflowException, AirflowSkipException from airflow.providers.slack.hooks.slack import SlackHook diff --git a/providers/smtp/pyproject.toml b/providers/smtp/pyproject.toml index b8c384991a434..7ea0cc937e5d4 100644 --- a/providers/smtp/pyproject.toml 
+++ b/providers/smtp/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/snowflake/pyproject.toml b/providers/snowflake/pyproject.toml index 311c2da8b3ea7..00d09db4db150 100644 --- a/providers/snowflake/pyproject.toml +++ b/providers/snowflake/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/snowflake/src/airflow/providers/snowflake/decorators/snowpark.py b/providers/snowflake/src/airflow/providers/snowflake/decorators/snowpark.py index c299fbb8691c8..a0dabd2193ceb 100644 --- a/providers/snowflake/src/airflow/providers/snowflake/decorators/snowpark.py +++ b/providers/snowflake/src/airflow/providers/snowflake/decorators/snowpark.py @@ -17,8 +17,8 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING from airflow.providers.snowflake.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py b/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py index 88cb5f331e259..e52fa4df0fc77 100644 --- a/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py +++ b/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py @@ -19,12 +19,12 @@ import base64 import os -from collections.abc import Iterable, Mapping +from collections.abc import Callable, Iterable, Mapping from contextlib import closing, contextmanager from functools import cached_property from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload +from typing import TYPE_CHECKING, Any, TypeVar, overload from urllib.parse import urlparse import requests @@ -49,7 +49,7 @@ def _try_to_boolean(value: Any): - if isinstance(value, (str, type(None))): + if isinstance(value, str | type(None)): return to_boolean(value) return value diff --git a/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py b/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py index d87e4cd04d37f..f3b23ad3a585d 100644 --- a/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py +++ b/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py @@ -417,11 +417,7 @@ def _should_retry_on_error(exception) -> bool: return exception.status in [429, 503, 504] if isinstance( exception, - ( - 
ConnectionError, - Timeout, - ClientConnectionError, - ), + ConnectionError | Timeout | ClientConnectionError, ): return True return False diff --git a/providers/snowflake/src/airflow/providers/snowflake/operators/snowpark.py b/providers/snowflake/src/airflow/providers/snowflake/operators/snowpark.py index 98c96923ddbfa..31127692902f4 100644 --- a/providers/snowflake/src/airflow/providers/snowflake/operators/snowpark.py +++ b/providers/snowflake/src/airflow/providers/snowflake/operators/snowpark.py @@ -17,8 +17,8 @@ from __future__ import annotations -from collections.abc import Collection, Mapping, Sequence -from typing import Any, Callable +from collections.abc import Callable, Collection, Mapping, Sequence +from typing import Any from airflow.providers.common.compat.standard.operators import PythonOperator, get_current_context from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook diff --git a/providers/snowflake/src/airflow/providers/snowflake/utils/snowpark.py b/providers/snowflake/src/airflow/providers/snowflake/utils/snowpark.py index 72b1192ad46e2..28bdc5311169f 100644 --- a/providers/snowflake/src/airflow/providers/snowflake/utils/snowpark.py +++ b/providers/snowflake/src/airflow/providers/snowflake/utils/snowpark.py @@ -18,7 +18,8 @@ from __future__ import annotations import inspect -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING if TYPE_CHECKING: from snowflake.snowpark import Session diff --git a/providers/snowflake/tests/unit/snowflake/transfers/test_copy_into_snowflake.py b/providers/snowflake/tests/unit/snowflake/transfers/test_copy_into_snowflake.py index ed0bd3b2dc7bc..168df7d9fef2a 100644 --- a/providers/snowflake/tests/unit/snowflake/transfers/test_copy_into_snowflake.py +++ b/providers/snowflake/tests/unit/snowflake/transfers/test_copy_into_snowflake.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from typing import Callable +from collections.abc import Callable from unittest import mock import pytest diff --git a/providers/sqlite/pyproject.toml b/providers/sqlite/pyproject.toml index 1dbf413d83d8e..e78cb2795dd6e 100644 --- a/providers/sqlite/pyproject.toml +++ b/providers/sqlite/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/ssh/pyproject.toml b/providers/ssh/pyproject.toml index 6395fd8d797f5..5a4f2f4374969 100644 --- a/providers/ssh/pyproject.toml +++ b/providers/ssh/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/standard/pyproject.toml b/providers/standard/pyproject.toml index 7eea0526ca40d..98acc80d4f43e 100644 --- a/providers/standard/pyproject.toml +++ b/providers/standard/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/standard/src/airflow/providers/standard/decorators/bash.py b/providers/standard/src/airflow/providers/standard/decorators/bash.py index c094e050bf7af..59dcfe33be810 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/bash.py +++ b/providers/standard/src/airflow/providers/standard/decorators/bash.py @@ -18,8 +18,8 @@ from __future__ import annotations import warnings -from collections.abc import Collection, Mapping, Sequence -from typing import TYPE_CHECKING, Any, Callable, ClassVar +from collections.abc import Callable, Collection, Mapping, Sequence +from typing import TYPE_CHECKING, Any, ClassVar from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/branch_external_python.py b/providers/standard/src/airflow/providers/standard/decorators/branch_external_python.py index f055769dd8306..3241d1bbded59 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/branch_external_python.py +++ b/providers/standard/src/airflow/providers/standard/decorators/branch_external_python.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/branch_python.py b/providers/standard/src/airflow/providers/standard/decorators/branch_python.py index 598cfa40ca911..719037ac054e5 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/branch_python.py +++ b/providers/standard/src/airflow/providers/standard/decorators/branch_python.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/branch_virtualenv.py b/providers/standard/src/airflow/providers/standard/decorators/branch_virtualenv.py index e47df708ba3f6..f138f90bda25e 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/branch_virtualenv.py +++ b/providers/standard/src/airflow/providers/standard/decorators/branch_virtualenv.py @@ -16,7 +16,8 @@ # under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/external_python.py b/providers/standard/src/airflow/providers/standard/decorators/external_python.py index 68f9f8cd5c9d4..fa399cdb1ee4d 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/external_python.py +++ b/providers/standard/src/airflow/providers/standard/decorators/external_python.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/python.py b/providers/standard/src/airflow/providers/standard/decorators/python.py index 6993beb737df0..73dab343d719e 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/python.py +++ b/providers/standard/src/airflow/providers/standard/decorators/python.py @@ -16,8 +16,8 @@ # under the License. from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING from airflow.providers.standard.operators.python import PythonOperator from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/python_virtualenv.py b/providers/standard/src/airflow/providers/standard/decorators/python_virtualenv.py index 2632f1be375bc..91ef7f03c0508 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/python_virtualenv.py +++ b/providers/standard/src/airflow/providers/standard/decorators/python_virtualenv.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/sensor.py b/providers/standard/src/airflow/providers/standard/decorators/sensor.py index 2d8c02977a7b7..5310d30695ed8 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/sensor.py +++ b/providers/standard/src/airflow/providers/standard/decorators/sensor.py @@ -17,8 +17,8 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import TYPE_CHECKING, Callable, ClassVar +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, ClassVar from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/decorators/short_circuit.py b/providers/standard/src/airflow/providers/standard/decorators/short_circuit.py index 41277335ae892..148a6ed828fdc 100644 --- a/providers/standard/src/airflow/providers/standard/decorators/short_circuit.py +++ b/providers/standard/src/airflow/providers/standard/decorators/short_circuit.py @@ -16,7 +16,8 @@ # under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/providers/standard/src/airflow/providers/standard/operators/bash.py b/providers/standard/src/airflow/providers/standard/operators/bash.py index 3195b33456c59..afb1ad40a7eca 100644 --- a/providers/standard/src/airflow/providers/standard/operators/bash.py +++ b/providers/standard/src/airflow/providers/standard/operators/bash.py @@ -20,9 +20,9 @@ import os import shutil import tempfile -from collections.abc import Container, Sequence +from collections.abc import Callable, Container, Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, cast +from typing import TYPE_CHECKING, Any, cast from airflow.exceptions import AirflowException, AirflowSkipException from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory diff --git a/providers/standard/src/airflow/providers/standard/operators/python.py b/providers/standard/src/airflow/providers/standard/operators/python.py index 54ae1bc3200a6..40bb51ba9d863 100644 --- a/providers/standard/src/airflow/providers/standard/operators/python.py +++ b/providers/standard/src/airflow/providers/standard/operators/python.py @@ -29,12 +29,12 @@ import types import warnings from abc import ABCMeta, abstractmethod -from collections.abc import Collection, Container, Iterable, Mapping, Sequence +from collections.abc import Callable, Collection, Container, Iterable, Mapping, Sequence from functools import cache from itertools import chain from pathlib import Path from tempfile import TemporaryDirectory -from typing import TYPE_CHECKING, Any, Callable, NamedTuple, cast +from typing import TYPE_CHECKING, Any, NamedTuple, cast import lazy_object_proxy from packaging.requirements import InvalidRequirement, Requirement diff --git a/providers/standard/src/airflow/providers/standard/operators/trigger_dagrun.py b/providers/standard/src/airflow/providers/standard/operators/trigger_dagrun.py index f87095f754cb9..4f474c3e6996e 100644 --- a/providers/standard/src/airflow/providers/standard/operators/trigger_dagrun.py +++ b/providers/standard/src/airflow/providers/standard/operators/trigger_dagrun.py @@ -195,7 +195,7 @@ def __init__( self.logical_date = logical_date if logical_date is NOTSET: self.logical_date = NOTSET - elif logical_date is None or isinstance(logical_date, (str, datetime.datetime)): + elif logical_date is None or isinstance(logical_date, str | datetime.datetime): self.logical_date = logical_date else: raise TypeError( diff --git a/providers/standard/src/airflow/providers/standard/sensors/external_task.py b/providers/standard/src/airflow/providers/standard/sensors/external_task.py index 7c48f9a449b04..12b41e7acf55b 100644 --- a/providers/standard/src/airflow/providers/standard/sensors/external_task.py +++ b/providers/standard/src/airflow/providers/standard/sensors/external_task.py @@ -19,8 +19,8 @@ import datetime import os import warnings -from collections.abc import Collection, Iterable -from typing import TYPE_CHECKING, Any, Callable, ClassVar +from collections.abc import Callable, Collection, Iterable +from typing import TYPE_CHECKING, Any, ClassVar from airflow.configuration import conf from airflow.exceptions import AirflowSkipException @@ -79,7 +79,7 @@ class ExternalDagLink(BaseOperatorLink): def get_link(self, operator: BaseOperator, 
*, ti_key: TaskInstanceKey) -> str: if TYPE_CHECKING: - assert isinstance(operator, (ExternalTaskMarker, ExternalTaskSensor)) + assert isinstance(operator, ExternalTaskMarker | ExternalTaskSensor) external_dag_id = operator.external_dag_id diff --git a/providers/standard/src/airflow/providers/standard/sensors/python.py b/providers/standard/src/airflow/providers/standard/sensors/python.py index 37c7244ba2b6a..512ebab4449d3 100644 --- a/providers/standard/src/airflow/providers/standard/sensors/python.py +++ b/providers/standard/src/airflow/providers/standard/sensors/python.py @@ -17,8 +17,8 @@ # under the License. from __future__ import annotations -from collections.abc import Mapping, Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Mapping, Sequence +from typing import TYPE_CHECKING, Any from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS from airflow.utils.context import context_merge diff --git a/providers/standard/src/airflow/providers/standard/utils/skipmixin.py b/providers/standard/src/airflow/providers/standard/utils/skipmixin.py index 5cfb577cd2f7c..b7c8de2de2903 100644 --- a/providers/standard/src/airflow/providers/standard/utils/skipmixin.py +++ b/providers/standard/src/airflow/providers/standard/utils/skipmixin.py @@ -52,7 +52,7 @@ def _ensure_tasks(nodes: Iterable[DAGNode]) -> Sequence[Operator]: from airflow.models.baseoperator import BaseOperator from airflow.models.mappedoperator import MappedOperator - return [n for n in nodes if isinstance(n, (BaseOperator, MappedOperator))] + return [n for n in nodes if isinstance(n, BaseOperator | MappedOperator)] # This class should only be used in Airflow 3.0 and later. diff --git a/providers/standard/tests/unit/standard/decorators/test_python.py b/providers/standard/tests/unit/standard/decorators/test_python.py index 4bab68cc04789..578817c5a2be8 100644 --- a/providers/standard/tests/unit/standard/decorators/test_python.py +++ b/providers/standard/tests/unit/standard/decorators/test_python.py @@ -19,7 +19,6 @@ import typing from collections import namedtuple from datetime import date -from typing import Union import pytest @@ -128,7 +127,7 @@ def t1() -> str | None: def test_infer_multiple_outputs_union_type(self): @task_decorator - def t1() -> Union[str, None]: + def t1() -> str | None: return "foo" assert t1().operator.multiple_outputs is False @@ -918,7 +917,7 @@ def up1() -> str: return "example" @dag.task(multiple_outputs=multiple_outputs) - def up2(x) -> Union[dict, None]: + def up2(x) -> dict | None: if x == 2: return {"x": "example"} raise AirflowSkipException() diff --git a/providers/tableau/pyproject.toml b/providers/tableau/pyproject.toml index f57384964380c..910a04e5a4ee8 100644 --- a/providers/tableau/pyproject.toml +++ b/providers/tableau/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
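A recurring edit across the provider hunks above is importing Callable from collections.abc instead of typing: the typing alias is deprecated by PEP 585, and the collections.abc class has been subscriptable in annotations since Python 3.9, so the new 3.10 floor makes the move safe everywhere. A standalone sketch with a hypothetical helper (not from the patch):

    from __future__ import annotations

    from collections.abc import Callable, Sequence


    def apply_all(value: int, funcs: Sequence[Callable[[int], int]]) -> int:
        # Hypothetical helper; the subscripted collections.abc.Callable works
        # both in annotations and at runtime on the 3.10 baseline.
        for func in funcs:
            value = func(value)
        return value


    assert apply_all(3, [lambda x: x + 1, lambda x: x * 2]) == 8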
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/telegram/pyproject.toml b/providers/telegram/pyproject.toml index 4820f728aa4a5..5bf66f133056f 100644 --- a/providers/telegram/pyproject.toml +++ b/providers/telegram/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/teradata/pyproject.toml b/providers/teradata/pyproject.toml index 3866c56d0a64a..dc86d8e2b7caa 100644 --- a/providers/teradata/pyproject.toml +++ b/providers/teradata/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/teradata/src/airflow/providers/teradata/hooks/bteq.py b/providers/teradata/src/airflow/providers/teradata/hooks/bteq.py index dae400a638825..2d9b0904e48f9 100644 --- a/providers/teradata/src/airflow/providers/teradata/hooks/bteq.py +++ b/providers/teradata/src/airflow/providers/teradata/hooks/bteq.py @@ -200,7 +200,7 @@ def _transfer_to_and_execute_bteq_on_remote( and exit_status not in ( bteq_quit_rc - if isinstance(bteq_quit_rc, (list, tuple)) + if isinstance(bteq_quit_rc, list | tuple) else [bteq_quit_rc if bteq_quit_rc is not None else 0] ) ): @@ -298,7 +298,7 @@ def execute_bteq_script_at_local( and process.returncode not in ( bteq_quit_rc - if isinstance(bteq_quit_rc, (list, tuple)) + if isinstance(bteq_quit_rc, list | tuple) else [bteq_quit_rc if bteq_quit_rc is not None else 0] ) ): diff --git a/providers/trino/pyproject.toml b/providers/trino/pyproject.toml index 2d1e882c810a9..fc611438986c4 100644 --- a/providers/trino/pyproject.toml +++ b/providers/trino/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/vertica/pyproject.toml b/providers/vertica/pyproject.toml index 08236b730e403..d3eabfb5dba8e 100644 --- a/providers/vertica/pyproject.toml +++ b/providers/vertica/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py b/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py index 7a59bbbd02e40..b19a8ee2a0900 100644 --- a/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py +++ b/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py @@ -17,8 +17,8 @@ # under the License. from __future__ import annotations -from collections.abc import Iterable, Mapping -from typing import Any, Callable, overload +from collections.abc import Callable, Iterable, Mapping +from typing import Any, overload from vertica_python import connect diff --git a/providers/weaviate/pyproject.toml b/providers/weaviate/pyproject.toml index 0ba26fded488d..cf91301c010b5 100644 --- a/providers/weaviate/pyproject.toml +++ b/providers/weaviate/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/weaviate/src/airflow/providers/weaviate/hooks/weaviate.py b/providers/weaviate/src/airflow/providers/weaviate/hooks/weaviate.py index 267f6848eb445..d9e451d2aa7d0 100644 --- a/providers/weaviate/src/airflow/providers/weaviate/hooks/weaviate.py +++ b/providers/weaviate/src/airflow/providers/weaviate/hooks/weaviate.py @@ -36,7 +36,8 @@ from airflow.hooks.base import BaseHook if TYPE_CHECKING: - from typing import Callable, Literal + from collections.abc import Callable + from typing import Literal import pandas as pd from weaviate.auth import AuthCredentials diff --git a/providers/yandex/pyproject.toml b/providers/yandex/pyproject.toml index 4b461eac85835..1eb44bd7219b9 100644 --- a/providers/yandex/pyproject.toml +++ b/providers/yandex/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. 
# Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/ydb/pyproject.toml b/providers/ydb/pyproject.toml index 5f4b1e00b9d09..3ec2d7229d181 100644 --- a/providers/ydb/pyproject.toml +++ b/providers/ydb/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/providers/zendesk/pyproject.toml b/providers/zendesk/pyproject.toml index 4d31bd828fd57..775322e61c0f7 100644 --- a/providers/zendesk/pyproject.toml +++ b/providers/zendesk/pyproject.toml @@ -44,13 +44,12 @@ classifiers = [ "Framework :: Apache Airflow", "Framework :: Apache Airflow :: Provider", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] -requires-python = "~=3.9" +requires-python = "~=3.10" # The dependencies should be modified in place in the generated file. # Any change in the dependencies is preserved when the file is regenerated diff --git a/pyproject.toml b/pyproject.toml index 9453edda31eba..689b2dd788545 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ name = "apache-airflow" description = "Programmatically author, schedule and monitor data pipelines" readme = { file = "generated/PYPI_README.md", content-type = "text/markdown" } license-files.globs = ["LICENSE"] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -50,7 +50,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -557,11 +556,11 @@ bypass-selection = true # Needed until https://github.com/astral-sh/ruff/issues/8237 is available. 
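For the requires-python bumps, ~=3.10 is a PEP 440 compatible-release specifier (equivalent to >=3.10, ==3.*), and the root pyproject.toml additionally caps the range at <3.13. One way to sanity-check what the new ranges admit, using the third-party packaging library (this snippet is illustrative and not part of the patch):

    from packaging.specifiers import SpecifierSet

    core = SpecifierSet("~=3.10,<3.13")   # apache-airflow core
    provider = SpecifierSet("~=3.10")     # provider distributions

    assert not core.contains("3.9.19")
    assert core.contains("3.10.14")
    assert core.contains("3.12.3")
    assert not core.contains("3.13.0")
    # Providers only pin the lower bound, so a future 3.13 would still match.
    assert provider.contains("3.13.0")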
[tool.black] line-length = 110 -target-version = ['py39', 'py310', 'py311', 'py312'] +target-version = ['py310', 'py311', 'py312'] ## ruff settings ## [tool.ruff] -target-version = "py39" +target-version = "py310" line-length = 110 extend-exclude = [ ".eggs", diff --git a/scripts/ci/pre_commit/chart_schema.py b/scripts/ci/pre_commit/chart_schema.py index de720573f7ef3..5257fcca54c49 100755 --- a/scripts/ci/pre_commit/chart_schema.py +++ b/scripts/ci/pre_commit/chart_schema.py @@ -58,7 +58,7 @@ def walk(value, path="$"): if isinstance(value, dict): for k, v in value.items(): yield from walk(v, path + f"[{k!r}]") - elif isinstance(value, (list, set, tuple)): + elif isinstance(value, list | set | tuple): for no, v in enumerate(value): yield from walk(v, path + f"[{no}]") diff --git a/scripts/ci/pre_commit/check_deprecations.py b/scripts/ci/pre_commit/check_deprecations.py index c1fb6354c9a91..f20d2c55e0459 100755 --- a/scripts/ci/pre_commit/check_deprecations.py +++ b/scripts/ci/pre_commit/check_deprecations.py @@ -138,7 +138,7 @@ def built_import(import_clause: ast.Import) -> list[str]: def found_compatible_decorators(mod: ast.Module) -> tuple[str, ...]: result = [] for node in mod.body: - if not isinstance(node, (ast.ImportFrom, ast.Import)): + if not isinstance(node, ast.ImportFrom | ast.Import): continue result.extend(built_import_from(node) if isinstance(node, ast.ImportFrom) else built_import(node)) return tuple(sorted(set(result))) @@ -193,7 +193,7 @@ def check_decorators(mod: ast.Module, file: str, file_group: str) -> int: category_value_ast = category_keyword.value warns_types = allowed_warnings[file_group] - if isinstance(category_value_ast, (ast.Name, ast.Attribute)): + if isinstance(category_value_ast, ast.Name | ast.Attribute): category_value = resolve_name(category_value_ast) if not any(cv.endswith(category_value) for cv in warns_types): errors += 1 diff --git a/scripts/ci/pre_commit/common_precommit_utils.py b/scripts/ci/pre_commit/common_precommit_utils.py index e0b965ad0f016..6b0a2511dccec 100644 --- a/scripts/ci/pre_commit/common_precommit_utils.py +++ b/scripts/ci/pre_commit/common_precommit_utils.py @@ -41,7 +41,7 @@ # Here we should add the second level paths that we want to have sub-packages in KNOWN_SECOND_LEVEL_PATHS = ["apache", "atlassian", "common", "cncf", "dbt", "microsoft"] -DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.9" +DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.10" try: from rich.console import Console @@ -277,12 +277,12 @@ def validate_cmd_result(cmd_result, include_ci_env_check=False): "\n[yellow]If you see strange stacktraces above, especially about missing imports " "run this command:[/]\n" ) - console.print("[magenta]breeze ci-image build --python 3.9 --upgrade-to-newer-dependencies[/]\n") + console.print("[magenta]breeze ci-image build --python 3.10 --upgrade-to-newer-dependencies[/]\n") elif cmd_result.returncode != 0: console.print( "[warning]\nIf you see strange stacktraces above, " - "run `breeze ci-image build --python 3.9` and try again." + "run `breeze ci-image build --python 3.10` and try again." 
diff --git a/scripts/ci/pre_commit/common_precommit_utils.py b/scripts/ci/pre_commit/common_precommit_utils.py
index e0b965ad0f016..6b0a2511dccec 100644
--- a/scripts/ci/pre_commit/common_precommit_utils.py
+++ b/scripts/ci/pre_commit/common_precommit_utils.py
@@ -41,7 +41,7 @@
 # Here we should add the second level paths that we want to have sub-packages in
 KNOWN_SECOND_LEVEL_PATHS = ["apache", "atlassian", "common", "cncf", "dbt", "microsoft"]
 
-DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.9"
+DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.10"
 
 try:
     from rich.console import Console
@@ -277,12 +277,12 @@ def validate_cmd_result(cmd_result, include_ci_env_check=False):
                 "\n[yellow]If you see strange stacktraces above, especially about missing imports "
                 "run this command:[/]\n"
             )
-            console.print("[magenta]breeze ci-image build --python 3.9 --upgrade-to-newer-dependencies[/]\n")
+            console.print("[magenta]breeze ci-image build --python 3.10 --upgrade-to-newer-dependencies[/]\n")
 
     elif cmd_result.returncode != 0:
         console.print(
             "[warning]\nIf you see strange stacktraces above, "
-            "run `breeze ci-image build --python 3.9` and try again."
+            "run `breeze ci-image build --python 3.10` and try again."
         )
     sys.exit(cmd_result.returncode)
diff --git a/scripts/ci/pre_commit/mypy.py b/scripts/ci/pre_commit/mypy.py
index 0beed5ee4ec77..90e3ae4379325 100755
--- a/scripts/ci/pre_commit/mypy.py
+++ b/scripts/ci/pre_commit/mypy.py
@@ -65,6 +65,6 @@
         "[yellow]If you see strange stacktraces above, and can't reproduce it, please run"
         " this command and try again:\n"
     )
-    console.print(f"breeze ci-image build --python 3.9{flag}\n")
+    console.print(f"breeze ci-image build --python 3.10{flag}\n")
     console.print("[yellow]You can also run `breeze down --cleanup-mypy-cache` to clean up the cache used.\n")
     sys.exit(res.returncode)
diff --git a/scripts/ci/pre_commit/mypy_folder.py b/scripts/ci/pre_commit/mypy_folder.py
index d3ca454e38524..d40542374cec7 100755
--- a/scripts/ci/pre_commit/mypy_folder.py
+++ b/scripts/ci/pre_commit/mypy_folder.py
@@ -156,6 +156,6 @@ def get_all_files(folder: str) -> list[str]:
         "[yellow]If you see strange stacktraces above, and can't reproduce it, please run"
         " this command and try again:\n"
     )
-    console.print(f"breeze ci-image build --python 3.9{flag}\n")
+    console.print(f"breeze ci-image build --python 3.10{flag}\n")
     console.print("[yellow]You can also run `breeze down --cleanup-mypy-cache` to clean up the cache used.\n")
     sys.exit(res.returncode)
diff --git a/scripts/ci/testing/summarize_captured_warnings.py b/scripts/ci/testing/summarize_captured_warnings.py
index ad834be37b32f..1597c47783542 100755
--- a/scripts/ci/testing/summarize_captured_warnings.py
+++ b/scripts/ci/testing/summarize_captured_warnings.py
@@ -23,11 +23,11 @@
 import json
 import os
 import shutil
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator
 from dataclasses import asdict, dataclass, fields
 from itertools import groupby
 from pathlib import Path
-from typing import Any, Callable
+from typing import Any
 from uuid import NAMESPACE_OID, uuid5
 
 if __name__ not in ("__main__", "__mp_main__"):
diff --git a/scripts/in_container/install_airflow_and_providers.py b/scripts/in_container/install_airflow_and_providers.py
index 05a437178b9c3..1896d1865c3de 100755
--- a/scripts/in_container/install_airflow_and_providers.py
+++ b/scripts/in_container/install_airflow_and_providers.py
@@ -542,7 +542,7 @@ def find_installation_spec(
 )
 @click.option(
     "--python-version",
-    default="3.9",
+    default="3.10",
     envvar="PYTHON_MAJOR_MINOR_VERSION",
     show_default=True,
     help="Python version to use",
diff --git a/scripts/in_container/run_generate_constraints.py b/scripts/in_container/run_generate_constraints.py
index edb8de1df96ad..58cf5cb6afc00 100755
--- a/scripts/in_container/run_generate_constraints.py
+++ b/scripts/in_container/run_generate_constraints.py
@@ -33,7 +33,7 @@
 from in_container_utils import AIRFLOW_DIST_PATH, AIRFLOW_ROOT_PATH, click, console, run_command
 
 DEFAULT_BRANCH = os.environ.get("DEFAULT_BRANCH", "main")
-PYTHON_VERSION = os.environ.get("PYTHON_MAJOR_MINOR_VERSION", "3.9")
+PYTHON_VERSION = os.environ.get("PYTHON_MAJOR_MINOR_VERSION", "3.10")
 GENERATED_PROVIDER_DEPENDENCIES_FILE = AIRFLOW_ROOT_PATH / "generated" / "provider_dependencies.json"
 ALL_PROVIDER_DEPENDENCIES = json.loads(GENERATED_PROVIDER_DEPENDENCIES_FILE.read_text())
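`Callable` moves from `typing` to `collections.abc` in the scripts above; PEP 585 deprecated the `typing` aliases of the ABCs from Python 3.9 on, and the two spellings are interchangeable in annotations. A minimal sketch with illustrative names, not taken from the Airflow scripts:

```python
import typing
from collections.abc import Callable  # preferred spelling since PEP 585

# typing.Callable is a deprecated alias whose origin is the collections.abc ABC.
assert typing.get_origin(typing.Callable[[int], str]) is Callable

Formatter = Callable[[int], str]  # hypothetical alias, usable as an annotation

def apply(fn: Formatter, value: int) -> str:
    return fn(value)

print(apply(hex, 255))  # 0xff
```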
f"``{x}``" - return ",\n".join(map(_wrap_backticks, val)) if isinstance(val, (tuple, list)) else _wrap_backticks(val) + return ",\n".join(map(_wrap_backticks, val)) if isinstance(val, tuple | list) else _wrap_backticks(val) def update_doc(file, data, app): diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py index 3dab9611ea508..e267fe43bb01c 100755 --- a/scripts/in_container/run_provider_yaml_files_check.py +++ b/scripts/in_container/run_provider_yaml_files_check.py @@ -29,10 +29,10 @@ import textwrap import warnings from collections import Counter -from collections.abc import Iterable +from collections.abc import Callable, Iterable from enum import Enum from functools import cache -from typing import Any, Callable +from typing import Any import jsonschema import yaml diff --git a/task-sdk/src/airflow/sdk/definitions/dag.py b/task-sdk/src/airflow/sdk/definitions/dag.py index 555207444db2e..c0a4230377449 100644 --- a/task-sdk/src/airflow/sdk/definitions/dag.py +++ b/task-sdk/src/airflow/sdk/definitions/dag.py @@ -91,7 +91,7 @@ DagStateChangeCallback = Callable[[Context], None] -ScheduleInterval = Union[None, str, timedelta, relativedelta] +ScheduleInterval = None | str | timedelta | relativedelta ScheduleArg = Union[ScheduleInterval, Timetable, BaseAsset, Collection[BaseAsset]]