20 changes: 17 additions & 3 deletions .github/actions/migration_tests/action.yml
@@ -18,6 +18,10 @@
---
name: 'Run migration tests'
description: 'Runs migration tests'
inputs:
python-version:
description: "Python version to run the tests on"
required: true
runs:
using: "composite"
steps:
@@ -38,12 +42,18 @@ runs:
airflow db migrate --to-revision heads &&
airflow db downgrade -n 2.7.0 -y &&
airflow db migrate
if: env.BACKEND != 'sqlite'
# migration tests cannot be run with Python 3.13 now - currently we have no FAB and no FABDBManager -
# and airflow (correctly) refuses to migrate things to Airflow 2 when there is no "ab_user"
# table created. So migration tests for now will have to be excluded for Python 3.13 until
# we start working on 3.2 (with migration to 3.1) or until FAB is supported in 3.13 (FAB 5)
# TODO(potiuk) bring migration tests back for Python 3.13 when one of the two conditions is fulfilled
if: env.BACKEND != 'sqlite' && inputs.python-version != '3.13'
- name: "Bring composer down"
shell: bash
run: breeze down
env:
COMPOSE_PROJECT_NAME: "docker-compose"
if: inputs.python-version != '3.13'
- name: "Test ORM migration 2 to 3: ${{env.BACKEND}}"
shell: bash
run: >
@@ -60,12 +70,13 @@ runs:
airflow db migrate --to-revision heads &&
airflow db downgrade -n 2.7.0 -y &&
airflow db migrate
if: env.BACKEND != 'sqlite'
if: env.BACKEND != 'sqlite' && inputs.python-version != '3.13'
- name: "Bring compose down again"
shell: bash
run: breeze down
env:
COMPOSE_PROJECT_NAME: "docker-compose"
if: inputs.python-version != '3.13'
- name: "Test ORM migration ${{env.BACKEND}}"
shell: bash
run: >
@@ -77,11 +88,13 @@
env:
COMPOSE_PROJECT_NAME: "docker-compose"
DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager"
if: inputs.python-version != '3.13'
- name: "Bring compose down again"
shell: bash
run: breeze down
env:
COMPOSE_PROJECT_NAME: "docker-compose"
if: inputs.python-version != '3.13'
- name: "Test offline migration ${{env.BACKEND}}"
shell: bash
run: >
@@ -93,12 +106,13 @@
env:
COMPOSE_PROJECT_NAME: "docker-compose"
DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager"
if: env.BACKEND != 'sqlite'
if: env.BACKEND != 'sqlite' && inputs.python-version != '3.13'
- name: "Bring any containers left down"
shell: bash
run: breeze down
env:
COMPOSE_PROJECT_NAME: "docker-compose"
if: inputs.python-version != '3.13'
- name: "Dump logs on failure ${{env.BACKEND}}"
shell: bash
run: docker ps -q | xargs docker logs
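Note: the comment in the hunk above comes down to a simple precondition. Airflow 2's metadata schema includes FAB-managed tables such as `ab_user`, and on Python 3.13 the FAB provider (and its `FABDBManager`) is unavailable, so those tables are never created and a downgrade to the 2.x schema is refused. A minimal illustrative sketch of that precondition, assuming a SQLAlchemy engine and a hypothetical helper name (not code from this PR):

```python
from sqlalchemy import create_engine, inspect


def can_downgrade_to_airflow2(db_url: str) -> bool:
    """Hypothetical helper: the Airflow 2 downgrade path needs the FAB-created "ab_user" table."""
    engine = create_engine(db_url)
    return inspect(engine).has_table("ab_user")


# On Python 3.13 (no FAB provider) the table is missing, so this returns False and the
# migration tests are skipped instead of failing halfway through a downgrade.
print(can_downgrade_to_airflow2("sqlite:///airflow.db"))
```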
4 changes: 3 additions & 1 deletion .github/actions/post_tests_success/action.yml
@@ -47,7 +47,9 @@ runs:
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
env:
CODECOV_TOKEN: ${{ inputs.codecov-token }}
if: env.ENABLE_COVERAGE == 'true' && env.TEST_TYPES != 'Helm' && inputs.python-version != '3.12'
if: >
env.ENABLE_COVERAGE == 'true' && env.TEST_TYPES != 'Helm' && inputs.python-version != '3.12'
&& inputs.python-version != '3.13'
with:
name: coverage-${{env.JOB_ID}}
flags: python-${{ env.PYTHON_MAJOR_MINOR_VERSION }},${{ env.BACKEND }}-${{ env.BACKEND_VERSION }}
6 changes: 6 additions & 0 deletions .github/actions/prepare_all_ci_images/action.yml
@@ -61,3 +61,9 @@ runs:
platform: ${{ inputs.platform }}
python: "3.12"
python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }}
- name: "Restore CI docker image ${{ inputs.platform }}:3.13"
uses: ./.github/actions/prepare_single_ci_image
with:
platform: ${{ inputs.platform }}
python: "3.13"
python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }}
7 changes: 6 additions & 1 deletion .github/workflows/run-unit-tests.yml
@@ -125,8 +125,11 @@ permissions:
jobs:
tests:
timeout-minutes: 65
# yamllint disable rule:line-length
name: "\
${{ inputs.test-scope }}-${{ inputs.test-group }}:\
${{ inputs.test-scope == 'All' && '' || inputs.test-scope == 'Quarantined' && 'Qrnt' || inputs.test-scope }}\
${{ inputs.test-scope == 'All' && '' || '-' }}\
${{ inputs.test-group == 'providers' && 'prov' || inputs.test-group}}:\
${{ inputs.test-name }}${{ inputs.test-name-separator }}${{ matrix.backend-version }}:\
${{ matrix.python-version}}:${{ matrix.test-types.description }}"
runs-on: ${{ fromJSON(inputs.runners) }}
@@ -179,6 +182,8 @@ jobs:
- name: >
Migration Tests: ${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }}
uses: ./.github/actions/migration_tests
with:
python-version: ${{ matrix.python-version }}
if: inputs.run-migration-tests == 'true' && inputs.test-group == 'core'
- name: >
${{ inputs.test-group }}:${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }}
13 changes: 9 additions & 4 deletions .pre-commit-config.yaml
@@ -513,7 +513,7 @@ repos:
entry: ./scripts/ci/pre_commit/check_common_sql_dependency.py
language: python
files: ^providers/.*/src/airflow/providers/.*/hooks/.*\.py$
additional_dependencies: ['rich>=12.4.4', 'pyyaml>=6.0.2', 'packaging>=23.2']
additional_dependencies: ['rich>=12.4.4', 'pyyaml>=6.0.2', 'packaging>=25']
- id: check-extra-packages-references
name: Checks setup extra packages
description: Checks if all the extras defined in hatch_build.py are listed in extra-packages-ref.rst file
@@ -576,7 +576,7 @@ repos:
^providers/fab/src/airflow/providers/fab/migrations/versions/.*$|^providers/fab/src/airflow/providers/fab/migrations/versions|
^airflow-core/src/airflow/utils/db\.py$|
^providers/fab/src/airflow/providers/fab/auth_manager/models/db\.py$
additional_dependencies: ['packaging>=23.2', 'rich>=12.4.4']
additional_dependencies: ['packaging>=25', 'rich>=12.4.4']
- id: update-version
name: Update versions in docs
entry: ./scripts/ci/pre_commit/update_versions.py
@@ -1014,10 +1014,15 @@ repos:
name: Update Airflow's meta-package pyproject.toml
language: python
entry: ./scripts/ci/pre_commit/update_airflow_pyproject_toml.py
files: ^.*/pyproject\.toml$|^scripts/ci/pre_commit/update_airflow_pyproject_toml\.py$
files: >
(?x)
^.*/pyproject\.toml$|
^scripts/ci/pre_commit/update_airflow_pyproject_toml\.py$|
^providers/.*/pyproject\.toml$|
^providers/.*/provider\.yaml$
pass_filenames: false
require_serial: true
additional_dependencies: ['rich>=12.4.4', 'tomli>=2.0.1', 'packaging>=23.2' ]
additional_dependencies: ['rich>=12.4.4', 'tomli>=2.0.1', 'packaging>=25' ]
- id: update-reproducible-source-date-epoch
name: Update Source Date Epoch for reproducible builds
language: python
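The `files:` pattern for `update-airflow-pyproject-toml` above switches to a verbose `(?x)` regex so each alternative can sit on its own line. A quick illustrative check of which paths now trigger the hook (my snippet, not part of the PR; pre-commit applies such patterns with `re.search` on repo-relative paths):

```python
import re

# Same pattern as in .pre-commit-config.yaml; (?x) makes whitespace and newlines insignificant.
FILES_PATTERN = re.compile(
    r"""(?x)
    ^.*/pyproject\.toml$|
    ^scripts/ci/pre_commit/update_airflow_pyproject_toml\.py$|
    ^providers/.*/pyproject\.toml$|
    ^providers/.*/provider\.yaml$
    """
)

for path in (
    "airflow-core/pyproject.toml",     # any pyproject.toml still matches
    "providers/amazon/provider.yaml",  # provider metadata now re-triggers the hook
    "providers/fab/src/db.py",         # unrelated files do not
):
    print(path, bool(FILES_PATTERN.search(path)))
```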
23 changes: 12 additions & 11 deletions Dockerfile.ci
@@ -871,10 +871,15 @@ EOF
# The content below is automatically copied from scripts/docker/entrypoint_ci.sh
COPY <<"EOF" /entrypoint_ci.sh
#!/usr/bin/env bash
if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
set -x
fi
function set_verbose() {
if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
set -x
else
set +x
fi
}

set_verbose
. "${AIRFLOW_SOURCES:-/opt/airflow}"/scripts/in_container/_in_container_script_init.sh

LD_PRELOAD="/usr/lib/$(uname -m)-linux-gnu/libstdc++.so.6"
@@ -1103,12 +1108,10 @@ function check_boto_upgrade() {
echo
echo "${COLOR_BLUE}Upgrading boto3, botocore to latest version to run Amazon tests with them${COLOR_RESET}"
echo
set -x
# shellcheck disable=SC2086
${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} aiobotocore s3fs || true
# shellcheck disable=SC2086
${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "boto3<1.38.3" "botocore<1.38.3"
set +x
}

function check_upgrade_sqlalchemy() {
@@ -1118,10 +1121,8 @@ function check_upgrade_sqlalchemy() {
echo
echo "${COLOR_BLUE}Upgrading sqlalchemy to the latest version to run tests with it${COLOR_RESET}"
echo
set -x
# shellcheck disable=SC2086
${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "sqlalchemy[asyncio]<2.1" "databricks-sqlalchemy>=2"
set +x
}

function check_downgrade_sqlalchemy() {
@@ -1185,7 +1186,7 @@ function check_force_lowest_dependencies() {
# --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2
# (binary lxml embeds its own libxml2, while xmlsec uses system one).
# See https://bugs.launchpad.net/lxml/+bug/2110068
uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec
uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec --all-extras
else
echo
echo "${COLOR_BLUE}Forcing dependencies to lowest versions for Airflow.${COLOR_RESET}"
@@ -1194,7 +1195,7 @@
# --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2
# (binary lxml embeds its own libxml2, while xmlsec uses system one).
# See https://bugs.launchpad.net/lxml/+bug/2110068
uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec
uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec --all-extras
fi
}

@@ -1474,8 +1475,8 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \
UV_LINK_MODE=copy \
AIRFLOW_PRE_COMMIT_VERSION=${AIRFLOW_PRE_COMMIT_VERSION}

# The PATH is needed for PIPX to find the tools installed
ENV PATH="/root/.local/bin:${PATH}"
# The PATH is needed for PIPX to find the tools installed and cargo to build the wheels
ENV PATH="/root/.local/bin:/root/.cargo/bin:${PATH}"

# Useful for creating a cache id based on the underlying architecture, preventing the use of cached python packages from
# an incorrect architecture.
2 changes: 1 addition & 1 deletion README.md
@@ -101,7 +101,7 @@ Apache Airflow is tested with:

| | Main version (dev) | Stable version (3.0.3) |
|------------|------------------------|------------------------|
| Python | 3.10, 3.11, 3.12 | 3.9, 3.10, 3.11, 3.12 |
| Python | 3.10, 3.11, 3.12, 3.13 | 3.9, 3.10, 3.11, 3.12 |
| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) |
| Kubernetes | 1.30, 1.31, 1.32, 1.33 | 1.30, 1.31, 1.32, 1.33 |
| PostgreSQL | 13, 14, 15, 16, 17 | 13, 14, 15, 16, 17 |
2 changes: 1 addition & 1 deletion airflow-core/docs/installation/prerequisites.rst
@@ -20,7 +20,7 @@ Prerequisites

Airflow® is tested with:

* Python: 3.10, 3.11, 3.12
* Python: 3.10, 3.11, 3.12, 3.13

* Databases:

2 changes: 1 addition & 1 deletion airflow-core/docs/start.rst
@@ -24,7 +24,7 @@ This quick start guide will help you bootstrap an Airflow standalone instance on

.. note::

Successful installation requires a Python 3 environment. Starting with Airflow 3.1.0, Airflow supports Python 3.10, 3.11, and 3.12.
Successful installation requires a Python 3 environment. Starting with Airflow 3.1.0, Airflow supports Python 3.10, 3.11, 3.12, 3.13.

The officially supported installation method is with ``pip``.

Expand Down
33 changes: 17 additions & 16 deletions airflow-core/pyproject.toml
Original file line number Diff line number Diff line change
@@ -35,7 +35,12 @@ name = "apache-airflow-core"
description = "Core packages for Apache Airflow, schedule and API server"
readme = { file = "README.md", content-type = "text/markdown" }
license-files.globs = ["LICENSE", "3rd-party-licenses/*.txt", "NOTICE"]
requires-python = ">=3.10,!=3.13"
# We know that it will take a while before we can support Python 3.14 because of all our dependencies.
# It takes about 4-7 months after a Python release before we can support it, so we proactively limit
# it to <3.14. This way we also have a chance to test with Python 3.14 and bump the upper bound, and
# manually mark providers that do not support it yet with !=3.14 - until they support it - which will
# also exclude resolving uv workspace dependencies for those providers.
requires-python = ">=3.10,<3.14"
authors = [
{ name = "Apache Software Foundation", email = "dev@airflow.apache.org" },
]
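For context on the new `requires-python` bound: with the `packaging` library (pinned elsewhere in this file to `packaging>=25.0`) the specifier admits 3.13 and stops before 3.14. Illustrative snippet, not part of the PR:

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=3.10,<3.14")

print(spec.contains("3.13"))  # True  - Python 3.13 is now in range
print(spec.contains("3.14"))  # False - capped until the dependency ecosystem catches up
print(spec.contains("3.9"))   # False - below the supported minimum
```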
@@ -80,14 +85,15 @@ dependencies = [
# 0.115.10 fastapi was a bad release that broke our API's and static checks.
# Related fastapi issue here: https://github.com/fastapi/fastapi/discussions/13431
"fastapi[standard]>=0.115.0,!=0.115.10",
"starlette>=0.45.0",
"httpx>=0.25.0",
'importlib_metadata>=6.5;python_version<"3.12"',
'importlib_metadata>=7.0;python_version>="3.12"',
"itsdangerous>=2.0",
"jinja2>=3.1.5",
"jsonschema>=4.19.1",
"lazy-object-proxy>=1.2.0",
'libcst >=1.1.0',
'libcst >=1.8.2',
"linkify-it-py>=2.0.0",
"lockfile>=0.12.2",
"methodtools>=0.4.7",
@@ -102,11 +108,10 @@ dependencies = [
# dependency and should be resolved as early as possible.
# This may be removed when future versions of pip are able
# to handle this dependency resolution automatically.
"opentelemetry-proto<9999",
"opentelemetry-proto<9999,>=1.27.0",
"packaging>=25.0",
"pathspec>=0.9.0",
'pendulum>=2.1.2,<4.0;python_version<"3.12"',
'pendulum>=3.0.0,<4.0;python_version>="3.12"',
'pendulum>=3.1.0',
"pluggy>=1.5.0",
"psutil>=5.8.0",
"pydantic>=2.11.0",
@@ -133,9 +138,7 @@ dependencies = [
"tabulate>=0.9.0",
"tenacity>=8.3.0",
"termcolor>=3.0.0",
# temporarily exclude 4.14.0 due to its broken compat with cadwyn
# See https://github.com/zmievsa/cadwyn/issues/283
"typing-extensions!=4.14.0",
"typing-extensions>=4.14.1",
# Universal Pathlib 0.2.4 adds extra validation for Paths and our integration with local file paths
# Does not work with it Tracked in https://github.com/fsspec/universal_pathlib/issues/276
"universal-pathlib>=0.2.2,!=0.2.4",
@@ -153,14 +156,14 @@ dependencies = [
[project.optional-dependencies]
"async" = [
"eventlet>=0.37.0",
"gevent>=24.2.1",
"greenlet>=0.4.9",
"gevent>=25.4.1",
"greenlet>=3.1.0",
]
"graphviz" = [
# The graphviz package creates friction when installing on MacOS as it needs graphviz system package to
# be installed, and it's really only used for very obscure features of Airflow, so we can skip it on MacOS
# Instead, if someone attempts to use it on MacOS, they will get explanatory error on how to install it
"graphviz>=0.12; sys_platform != 'darwin'",
"graphviz>=0.20; sys_platform != 'darwin'",
]
"kerberos" = [
"pykerberos>=1.1.13",
@@ -172,9 +175,8 @@ dependencies = [
]
"sentry" = [
"blinker>=1.1",
# Sentry SDK 1.33 is broken when greenlets are installed and fails to import
# See https://github.com/getsentry/sentry-python/issues/2473
"sentry-sdk>=1.32.0,!=1.33.0",
# Apparently sentry needs flask to be installed to work properly
"sentry-sdk[flask]>=2.30.0",
]
"statsd" = [
"statsd>=3.3.0",
@@ -199,7 +201,6 @@ Mastodon = "https://fosstodon.org/@airflow"
Bluesky = "https://bsky.app/profile/apache-airflow.bsky.social"
YouTube = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"


[tool.hatch.version]
path = "src/airflow/__init__.py"

@@ -249,7 +250,7 @@ dev = [
"apache-airflow-providers-amazon",
"apache-airflow-providers-celery",
"apache-airflow-providers-cncf-kubernetes",
"apache-airflow-providers-fab",
"apache-airflow-providers-fab>=2.2.0; python_version < '3.13'",
"apache-airflow-providers-git",
"apache-airflow-providers-ftp",
]
10 changes: 10 additions & 0 deletions airflow-core/src/airflow/api_fastapi/core_api/app.py
@@ -18,6 +18,7 @@

import logging
import os
import sys
import warnings
from pathlib import Path

@@ -35,6 +36,8 @@

log = logging.getLogger(__name__)

PY313 = sys.version_info >= (3, 13)


def init_views(app: FastAPI) -> None:
"""Init views by registering the different routers."""
@@ -124,6 +127,13 @@ def init_flask_plugins(app: FastAPI) -> None:
try:
from airflow.providers.fab.www.app import create_app
except ImportError:
if PY313:
log.info(
"Some Airflow 2 plugins have been detected in your environment. Currently FAB provider "
"does not support Python 3.13, so you cannot use Airflow 2 plugins with Airflow 3 until "
"FAB provider will be Python 3.13 compatible."
)
return
raise AirflowException(
"Some Airflow 2 plugins have been detected in your environment. "
"To run them with Airflow 3, you must install the FAB provider in your Airflow environment."
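The `init_flask_plugins` change above is a guard-the-optional-import pattern: on Python 3.13 a missing FAB provider is logged and tolerated, while on older interpreters it remains a hard error. A condensed, self-contained sketch of the same idea (the real handler raises `AirflowException` with installation guidance instead of a bare re-raise):

```python
import logging
import sys

log = logging.getLogger(__name__)
PY313 = sys.version_info >= (3, 13)


def init_flask_plugins_sketch() -> None:
    try:
        from airflow.providers.fab.www.app import create_app  # noqa: F401
    except ImportError:
        if PY313:
            # No FAB provider for 3.13 yet: skip Airflow 2 plugins instead of failing startup.
            log.info("FAB provider unavailable on Python 3.13; skipping Airflow 2 plugin support.")
            return
        # On 3.12 and earlier a missing FAB provider is a genuine configuration problem.
        raise
    # ... otherwise continue and mount the Flask app built by create_app() ...
```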
2 changes: 1 addition & 1 deletion airflow-core/src/airflow/models/connection.py
@@ -44,7 +44,7 @@
# the symbols #,!,-,_,.,:,\,/ and () requiring at least one match.
#
# You can try the regex here: https://regex101.com/r/69033B/1
RE_SANITIZE_CONN_ID = re.compile(r"^[\w\#\!\(\)\-\.\:\/\\]{1,}$")
RE_SANITIZE_CONN_ID = re.compile(r"^[\w#!()\-.:/\\]{1,}$")
# the conn ID max len should be 250
CONN_ID_MAX_LEN: int = 250
