6 changes: 0 additions & 6 deletions .github/workflows/run-unit-tests.yml
@@ -89,11 +89,6 @@ on: # yamllint disable-line rule:truthy
         required: false
         default: "false"
         type: string
-      pydantic:
-        description: "The version of pydantic to use"
-        required: false
-        default: "v2"
-        type: string
       downgrade-pendulum:
         description: "Whether to downgrade pendulum or not (true/false)"
         required: false
@@ -148,7 +143,6 @@ jobs:
       JOB_ID: "${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}"
       MOUNT_SOURCES: "skip"
       PARALLEL_TEST_TYPES: "${{ inputs.parallel-test-types-list-as-string }}"
-      PYDANTIC: "${{ inputs.pydantic }}"
       PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}"
       UPGRADE_BOTO: "${{ inputs.upgrade-boto }}"
       AIRFLOW_MONITOR_DELAY_TIME_IN_SECONDS: "${{inputs.monitor-delay-time-in-seconds}}"
44 changes: 0 additions & 44 deletions .github/workflows/special-tests.yml
@@ -105,50 +105,6 @@ jobs:
       run-coverage: ${{ inputs.run-coverage }}
       debug-resources: ${{ inputs.debug-resources }}

-  tests-pydantic-v1:
-    name: "Pydantic v1 test"
-    uses: ./.github/workflows/run-unit-tests.yml
-    permissions:
-      contents: read
-      packages: read
-    secrets: inherit
-    with:
-      runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }}
-      pydantic: "v1"
-      test-name: "Pydantic-V1-Postgres"
-      test-scope: "All"
-      backend: "postgres"
-      image-tag: ${{ inputs.image-tag }}
-      python-versions: "['${{ inputs.default-python-version }}']"
-      backend-versions: "['${{ inputs.default-postgres-version }}']"
-      excludes: "[]"
-      parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }}
-      include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
-      run-coverage: ${{ inputs.run-coverage }}
-      debug-resources: ${{ inputs.debug-resources }}
-
-  tests-pydantic-none:
-    name: "Pydantic removed test"
-    uses: ./.github/workflows/run-unit-tests.yml
-    permissions:
-      contents: read
-      packages: read
-    secrets: inherit
-    with:
-      runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }}
-      pydantic: "none"
-      test-name: "Pydantic-Removed-Postgres"
-      test-scope: "All"
-      backend: "postgres"
-      image-tag: ${{ inputs.image-tag }}
-      python-versions: "['${{ inputs.default-python-version }}']"
-      backend-versions: "['${{ inputs.default-postgres-version }}']"
-      excludes: "[]"
-      parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }}
-      include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
-      run-coverage: ${{ inputs.run-coverage }}
-      debug-resources: ${{ inputs.debug-resources }}
-
   tests-pendulum-2:
     name: "Pendulum2 test"
     uses: ./.github/workflows/run-unit-tests.yml
41 changes: 0 additions & 41 deletions Dockerfile.ci
@@ -1032,46 +1032,6 @@ function check_boto_upgrade() {
     pip check
 }

-function check_pydantic() {
-    if [[ ${PYDANTIC=} == "none" ]]; then
-        echo
-        echo "${COLOR_YELLOW}Reinstalling airflow from local sources to account for pyproject.toml changes${COLOR_RESET}"
-        echo
-        # shellcheck disable=SC2086
-        ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} -e .
-        echo
-        echo "${COLOR_YELLOW}Remove pydantic and 3rd party libraries that depend on it${COLOR_RESET}"
-        echo
-        # shellcheck disable=SC2086
-        ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} pydantic aws-sam-translator openai \
-            pyiceberg qdrant-client cfn-lint weaviate-client google-cloud-aiplatform
-        pip check
-    elif [[ ${PYDANTIC=} == "v1" ]]; then
-        echo
-        echo "${COLOR_YELLOW}Reinstalling airflow from local sources to account for pyproject.toml changes${COLOR_RESET}"
-        echo
-        # shellcheck disable=SC2086
-        ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} -e .
-        echo
-        echo "${COLOR_YELLOW}Uninstalling dependencies which are not compatible with Pydantic 1${COLOR_RESET}"
-        echo
-        # shellcheck disable=SC2086
-        ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} pyiceberg weaviate-client
-        echo
-        echo "${COLOR_YELLOW}Downgrading Pydantic to < 2${COLOR_RESET}"
-        echo
-        # Pydantic 1.10.17/1.10.15 conflicts with aws-sam-translator so we need to exclude it
-        # shellcheck disable=SC2086
-        ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "pydantic<2.0.0,!=1.10.17,!=1.10.15"
-        pip check
-    else
-        echo
-        echo "${COLOR_BLUE}Leaving default pydantic v2${COLOR_RESET}"
-        echo
-    fi
-}
-

 function check_downgrade_sqlalchemy() {
     if [[ ${DOWNGRADE_SQLALCHEMY=} != "true" ]]; then
         return
@@ -1181,7 +1141,6 @@ function check_force_lowest_dependencies() {
 determine_airflow_to_use
 environment_initialization
 check_boto_upgrade
-check_pydantic
 check_downgrade_sqlalchemy
 check_downgrade_pendulum
 check_force_lowest_dependencies
4 changes: 2 additions & 2 deletions INSTALL
@@ -257,8 +257,8 @@ Those extras are available as regular core airflow extras - they install optional
 # START CORE EXTRAS HERE

 aiobotocore, apache-atlas, apache-webhdfs, async, cgroups, cloudpickle, github-enterprise, google-
-auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, pydantic, rabbitmq, s3fs, saml,
-sentry, statsd, uv, virtualenv
+auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, rabbitmq, s3fs, saml, sentry,
+statsd, uv, virtualenv

 # END CORE EXTRAS HERE
5 changes: 2 additions & 3 deletions airflow/operators/python.py
@@ -56,7 +56,6 @@
 from airflow.utils.file import get_unique_dag_module_name
 from airflow.utils.operator_helpers import ExecutionCallableRunner, KeywordParameters
 from airflow.utils.process_utils import execute_in_subprocess
-from airflow.utils.pydantic import is_pydantic_2_installed
 from airflow.utils.python_virtualenv import prepare_virtualenv, write_python_script
 from airflow.utils.session import create_session

@@ -549,8 +548,8 @@ def _execute_python_callable_in_subprocess(self, python_path: Path):
         self._write_args(input_path)
         self._write_string_args(string_args_path)

-        if self.use_airflow_context and (not is_pydantic_2_installed() or not _ENABLE_AIP_44):
-            error_msg = "`get_current_context()` needs to be used with Pydantic 2 and AIP-44 enabled."
+        if self.use_airflow_context and not _ENABLE_AIP_44:
+            error_msg = "`get_current_context()` needs to be used with AIP-44 enabled."
             raise AirflowException(error_msg)

         jinja_context = {
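
Note: the simplified guard above only checks AIP-44 now, because Pydantic 2 becomes a hard dependency with this change. A minimal usage sketch of what the guard protects (assumes AIP-44 is enabled; passing `use_airflow_context` through the `@task.virtualenv` decorator is an assumption based on the attribute referenced in the diff, not something shown in this PR):

    # A sketch, not from this PR: a virtualenv-executed callable that opts
    # in to a live Airflow context, which the AIP-44 guard above protects.
    from airflow.decorators import task

    @task.virtualenv(requirements=["apache-airflow"], use_airflow_context=True)
    def show_run_id():
        from airflow.operators.python import get_current_context

        context = get_current_context()
        print(context["run_id"])
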
10 changes: 5 additions & 5 deletions airflow/serialization/pydantic/dag.py
@@ -20,17 +20,17 @@
 from datetime import datetime
 from typing import Any, List, Optional

-from typing_extensions import Annotated
-
-from airflow import DAG, settings
-from airflow.configuration import conf as airflow_conf
-from airflow.utils.pydantic import (
+from pydantic import (
     BaseModel as BaseModelPydantic,
     ConfigDict,
     PlainSerializer,
     PlainValidator,
     ValidationInfo,
 )
+from typing_extensions import Annotated
+
+from airflow import DAG, settings
+from airflow.configuration import conf as airflow_conf


 def serialize_operator(x: DAG) -> dict:
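
Note: the `PlainValidator` and `PlainSerializer` imported above are attached to fields via `Annotated`, which is how these Pydantic wrapper models revive and serialize ORM objects such as the `DAG` handled by `serialize_operator`. A generic, self-contained sketch of that pattern (illustrative names only, not Airflow's actual helpers):

    # Illustrative sketch of the Annotated validator/serializer pattern
    # (validate_payload/serialize_payload are hypothetical names).
    from typing import Annotated, Any

    from pydantic import BaseModel, PlainSerializer, PlainValidator

    def validate_payload(value: Any) -> Any:
        # Real code would revive a domain object here.
        return value

    def serialize_payload(value: Any) -> str:
        # Real code would call a domain-specific serializer here.
        return str(value)

    PayloadField = Annotated[
        Any,
        PlainValidator(validate_payload),
        PlainSerializer(serialize_payload, return_type=str),
    ]

    class ExamplePydantic(BaseModel):
        payload: PayloadField

    print(ExamplePydantic(payload=123).model_dump())  # {'payload': '123'}
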
6 changes: 3 additions & 3 deletions airflow/serialization/pydantic/dag_run.py
@@ -19,10 +19,11 @@
 from datetime import datetime
 from typing import TYPE_CHECKING, Iterable, List, Optional

+from pydantic import BaseModel as BaseModelPydantic, ConfigDict
+
 from airflow.models.dagrun import DagRun
 from airflow.serialization.pydantic.dag import PydanticDag
 from airflow.serialization.pydantic.dataset import DatasetEventPydantic
-from airflow.utils.pydantic import BaseModel as BaseModelPydantic, ConfigDict, is_pydantic_2_installed

 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
@@ -111,5 +112,4 @@ def get_log_template(self, session: Session):
         return DagRun._get_log_template(log_template_id=self.log_template_id)


-if is_pydantic_2_installed():
-    DagRunPydantic.model_rebuild()
+DagRunPydantic.model_rebuild()
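
Note: with the Pydantic 1 fallback gone, `model_rebuild()` now runs unconditionally. In Pydantic 2 it completes a model whose annotations contain forward references that could not be resolved at class-creation time; a standalone sketch of the mechanism (not Airflow code):

    # Why model_rebuild() is needed: RunLike references DagLike before it
    # is defined, so the schema is completed only after the rebuild call.
    from __future__ import annotations

    from pydantic import BaseModel

    class RunLike(BaseModel):
        dag: DagLike | None = None

    class DagLike(BaseModel):
        dag_id: str

    # Now that DagLike exists, resolve the deferred reference.
    RunLike.model_rebuild()

    print(RunLike(dag={"dag_id": "example"}))
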
2 changes: 1 addition & 1 deletion airflow/serialization/pydantic/dataset.py
@@ -17,7 +17,7 @@
 from datetime import datetime
 from typing import List, Optional

-from airflow.utils.pydantic import BaseModel as BaseModelPydantic, ConfigDict
+from pydantic import BaseModel as BaseModelPydantic, ConfigDict


 class DagScheduleDatasetReferencePydantic(BaseModelPydantic):
3 changes: 2 additions & 1 deletion airflow/serialization/pydantic/job.py
@@ -18,9 +18,10 @@
 from functools import cached_property
 from typing import TYPE_CHECKING, Optional

+from pydantic import BaseModel as BaseModelPydantic, ConfigDict
+
 from airflow.executors.executor_loader import ExecutorLoader
 from airflow.jobs.base_job_runner import BaseJobRunner
-from airflow.utils.pydantic import BaseModel as BaseModelPydantic, ConfigDict


 def check_runner_initialized(job_runner: Optional[BaseJobRunner], job_type: str) -> BaseJobRunner:
18 changes: 8 additions & 10 deletions airflow/serialization/pydantic/taskinstance.py
@@ -19,6 +19,12 @@
 from datetime import datetime
 from typing import TYPE_CHECKING, Any, Iterable, Optional

+from pydantic import (
+    BaseModel as BaseModelPydantic,
+    ConfigDict,
+    PlainSerializer,
+    PlainValidator,
+)
 from typing_extensions import Annotated

 from airflow.exceptions import AirflowRescheduleException, TaskDeferred
@@ -36,22 +42,15 @@
 from airflow.serialization.pydantic.dag_run import DagRunPydantic
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.net import get_hostname
-from airflow.utils.pydantic import (
-    BaseModel as BaseModelPydantic,
-    ConfigDict,
-    PlainSerializer,
-    PlainValidator,
-    is_pydantic_2_installed,
-)
 from airflow.utils.xcom import XCOM_RETURN_KEY

 if TYPE_CHECKING:
     import pendulum
+    from pydantic import ValidationInfo
     from sqlalchemy.orm import Session

     from airflow.models.dagrun import DagRun
     from airflow.utils.context import Context
-    from airflow.utils.pydantic import ValidationInfo
     from airflow.utils.state import DagRunState


@@ -552,5 +551,4 @@ def get_relevant_upstream_map_indexes(
     )


-if is_pydantic_2_installed():
-    TaskInstancePydantic.model_rebuild()
+TaskInstancePydantic.model_rebuild()
2 changes: 1 addition & 1 deletion airflow/serialization/pydantic/tasklog.py
@@ -16,7 +16,7 @@
 # under the License.
 from datetime import datetime

-from airflow.utils.pydantic import BaseModel as BaseModelPydantic, ConfigDict
+from pydantic import BaseModel as BaseModelPydantic, ConfigDict


 class LogTemplatePydantic(BaseModelPydantic):
3 changes: 2 additions & 1 deletion airflow/serialization/pydantic/trigger.py
@@ -18,8 +18,9 @@
 import datetime
 from typing import Any, Dict, Optional

+from pydantic import BaseModel as BaseModelPydantic, ConfigDict
+
 from airflow.utils import timezone
-from airflow.utils.pydantic import BaseModel as BaseModelPydantic, ConfigDict


 class TriggerPydantic(BaseModelPydantic):
7 changes: 4 additions & 3 deletions airflow/serialization/serialized_objects.py
@@ -96,14 +96,15 @@
 if TYPE_CHECKING:
     from inspect import Parameter

+    from pydantic import BaseModel
+
     from airflow.models.baseoperatorlink import BaseOperatorLink
     from airflow.models.expandinput import ExpandInput
     from airflow.models.operator import Operator
     from airflow.models.taskmixin import DAGNode
     from airflow.serialization.json_schema import Validator
     from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
     from airflow.timetables.base import Timetable
-    from airflow.utils.pydantic import BaseModel

 HAS_KUBERNETES: bool
 try:
@@ -364,8 +365,8 @@ def encode_start_trigger_args(var: StartTriggerArgs) -> dict[str, Any]:

     :meta private:
     """
-    serialize_kwargs = (
-        lambda key: BaseSerialization.serialize(getattr(var, key)) if getattr(var, key) is not None else None
-    )
+    serialize_kwargs = lambda key: (
+        BaseSerialization.serialize(getattr(var, key)) if getattr(var, key) is not None else None
+    )
     return {
         "__type": "START_TRIGGER_ARGS",
61 changes: 0 additions & 61 deletions airflow/utils/pydantic.py

This file was deleted.
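
Note: this is the `airflow.utils.pydantic` shim every file above stops importing from. Its contents are not shown in the diff; judging purely from the import sites it served, it looked roughly like the following speculative reconstruction:

    # Speculative reconstruction of the deleted shim, inferred only from
    # its import sites in this PR -- not the actual deleted source.
    from importlib.metadata import PackageNotFoundError, version

    def is_pydantic_2_installed() -> bool:
        try:
            return version("pydantic").startswith("2.")
        except PackageNotFoundError:
            return False

    if is_pydantic_2_installed():
        from pydantic import (  # noqa: F401
            BaseModel,
            ConfigDict,
            PlainSerializer,
            PlainValidator,
            ValidationInfo,
        )
    else:
        # Placeholder stand-ins so the import sites still resolved when
        # Pydantic 2 was absent (shape guessed, not the deleted code).
        class BaseModel:  # type: ignore[no-redef]
            pass

        ConfigDict = PlainSerializer = PlainValidator = ValidationInfo = None  # type: ignore
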

4 changes: 2 additions & 2 deletions contributing-docs/12_airflow_dependencies_and_extras.rst
@@ -165,8 +165,8 @@ Those extras are available as regular core airflow extras - they install optional
 .. START CORE EXTRAS HERE

 aiobotocore, apache-atlas, apache-webhdfs, async, cgroups, cloudpickle, github-enterprise, google-
-auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, pydantic, rabbitmq, s3fs, saml,
-sentry, statsd, uv, virtualenv
+auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, rabbitmq, s3fs, saml, sentry,
+statsd, uv, virtualenv

 .. END CORE EXTRAS HERE