2 changes: 1 addition & 1 deletion .dockerignore
@@ -35,7 +35,7 @@
!docs
!licenses
!providers/
- !task_sdk/
+ !task-sdk/

# Add those folders to the context so that they are available in the CI container
!scripts
2 changes: 1 addition & 1 deletion .github/boring-cyborg.yml
@@ -414,7 +414,7 @@ labelPRBasedOnFilePath:
- tests/system/**/*

area:task-sdk:
- - task_sdk/**/*
+ - task-sdk/**/*

area:db-migrations:
- airflow/migrations/versions/*
2 changes: 1 addition & 1 deletion .github/dependabot.yml
@@ -22,7 +22,7 @@ updates:
- /clients/python
- /dev/breeze
- /docker_tests
- - /task_sdk
+ - /task-sdk
- /
schedule:
interval: daily
16 changes: 8 additions & 8 deletions .pre-commit-config.yaml
@@ -676,7 +676,7 @@ repos:
name: Sync template context variable refs
language: python
entry: ./scripts/ci/pre_commit/template_context_key_sync.py
- files: ^airflow/models/taskinstance\.py$|^task_sdk/src/airflow/sdk/definitions/context\.py$|^docs/apache-airflow/templates-ref\.rst$
+ files: ^airflow/models/taskinstance\.py$|^task-sdk/src/airflow/sdk/definitions/context\.py$|^docs/apache-airflow/templates-ref\.rst$
- id: check-base-operator-usage
language: pygrep
name: Check BaseOperator core imports
@@ -1174,9 +1174,9 @@ repos:
^tests/ |
^providers/tests/ |
^providers/.*/tests/ |
- ^task_sdk/src/airflow/sdk/definitions/dag.py$ |
- ^task_sdk/src/airflow/sdk/execution_time/secrets_masker.py$ |
- ^task_sdk/src/airflow/sdk/definitions/_internal/node.py$ |
+ ^task-sdk/src/airflow/sdk/definitions/dag.py$ |
+ ^task-sdk/src/airflow/sdk/execution_time/secrets_masker.py$ |
+ ^task-sdk/src/airflow/sdk/definitions/_internal/node.py$ |
^dev/.*\.py$ |
^scripts/.*\.py$ |
^docker_tests/.*$ |
@@ -1293,7 +1293,7 @@ repos:
provider_packages|
providers/|
scripts|
- task_sdk/|
+ task-sdk/|
tests/dags/test_imports\.py
)
require_serial: true
@@ -1344,14 +1344,14 @@
name: Run mypy for Task SDK
language: python
entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages
- files: ^task_sdk/src/airflow/sdk/.*\.py$|^task_sdk/tests//.*\.py$
+ files: ^task-sdk/src/airflow/sdk/.*\.py$|^task-sdk/tests//.*\.py$
require_serial: true
additional_dependencies: ['rich>=12.4.4']
- id: mypy-task-sdk
stages: ['manual']
name: Run mypy for Task SDK (manual)
language: python
- entry: ./scripts/ci/pre_commit/mypy_folder.py task_sdk/src/airflow/sdk
+ entry: ./scripts/ci/pre_commit/mypy_folder.py task-sdk/src/airflow/sdk
pass_filenames: false
files: ^.*\.py$
require_serial: true
@@ -1398,7 +1398,7 @@ repos:
- id: generate-tasksdk-datamodels
name: Generate Datamodels for TaskSDK client
language: python
- entry: uv run --active --group codegen --project apache-airflow-task-sdk --directory task_sdk -s dev/generate_models.py
+ entry: uv run --active --group codegen --project apache-airflow-task-sdk --directory task-sdk -s dev/generate_models.py
pass_filenames: false
files: ^airflow/api_fastapi/execution_api/.*\.py$
require_serial: true
2 changes: 1 addition & 1 deletion Dockerfile
@@ -815,7 +815,7 @@ function install_airflow() {
local installation_command_flags
if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then
# When installing from sources - we always use `--editable` mode
- installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task_sdk --editable ./devel-common"
+ installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task-sdk --editable ./devel-common"
while IFS= read -r -d '' pyproject_toml_file; do
project_folder=$(dirname ${pyproject_toml_file})
installation_command_flags="${installation_command_flags} --editable ${project_folder}"
4 changes: 2 additions & 2 deletions Dockerfile.ci
@@ -585,7 +585,7 @@ function install_airflow() {
local installation_command_flags
if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then
# When installing from sources - we always use `--editable` mode
- installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task_sdk --editable ./devel-common"
+ installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task-sdk --editable ./devel-common"
while IFS= read -r -d '' pyproject_toml_file; do
project_folder=$(dirname ${pyproject_toml_file})
installation_command_flags="${installation_command_flags} --editable ${project_folder}"
@@ -1001,7 +1001,7 @@ function check_force_lowest_dependencies() {
echo
fi
set -x
- uv pip install --python "$(which python)" --resolution lowest-direct --upgrade --editable ".${EXTRA}" --editable "./task_sdk" --editable "./devel-common"
+ uv pip install --python "$(which python)" --resolution lowest-direct --upgrade --editable ".${EXTRA}" --editable "./task-sdk" --editable "./devel-common"
set +x
}

2 changes: 1 addition & 1 deletion airflow/models/taskinstance.py
@@ -968,7 +968,7 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]:
return triggering_events

# NOTE: If you add to this dict, make sure to also update the following:
- # * Context in task_sdk/src/airflow/sdk/definitions/context.py
+ # * Context in task-sdk/src/airflow/sdk/definitions/context.py
# * KNOWN_CONTEXT_KEYS in airflow/utils/context.py
# * Table in docs/apache-airflow/templates-ref.rst
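
A sketch of how that sync could be spot-checked, assuming ``Context`` is a ``TypedDict`` in the referenced Task SDK module (so its keys are its annotations) — an editor's illustration only, not part of this change:

.. code:: python

    from typing import get_type_hints

    from airflow.sdk.definitions.context import Context  # task-sdk/src after this PR
    from airflow.utils.context import KNOWN_CONTEXT_KEYS

    # Keys present on Context but missing from KNOWN_CONTEXT_KEYS indicate drift.
    context_keys = set(get_type_hints(Context))
    print(sorted(context_keys - KNOWN_CONTEXT_KEYS) or "context keys in sync")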

2 changes: 1 addition & 1 deletion airflow/utils/context.py
@@ -47,7 +47,7 @@
from airflow.sdk.types import OutletEventAccessorsProtocol

# NOTE: Please keep this in sync with the following:
- # * Context in task_sdk/src/airflow/sdk/definitions/context.py
+ # * Context in task-sdk/src/airflow/sdk/definitions/context.py
# * Table in docs/apache-airflow/templates-ref.rst
KNOWN_CONTEXT_KEYS: set[str] = {
"conn",
6 changes: 3 additions & 3 deletions contributing-docs/07_local_virtualenv.rst
@@ -227,7 +227,7 @@ dependencies. For example, to install Amazon provider you need to install ``amaz

.. code:: bash

- pip install -e "./task_sdk"
+ pip install -e "./task-sdk"
pip install -e "./devel-common"
pip install -e "./providers/amazon"
pip install -e ".[amazon]"
@@ -284,7 +284,7 @@ You can run the following command in the venv that you have installed airflow in
.. code:: bash

pip install -e ".[google]"
- pip install -e "./task_sdk"
+ pip install -e "./task-sdk"
pip install -e "./devel-common"
pip install -e "./providers/google"

@@ -294,7 +294,7 @@ dependency groups will be implemented in ``pip`` - April 2025) - it will not be
when installing airflow - currently with ``pip`` it is the only way to install development dependencies
of the provider and is a bit convoluted.

- The second installs ``task_sdk`` project - where APIs for providers are kept.
+ The second installs ``task-sdk`` project - where APIs for providers are kept.

The third one installs google provider source code in development mode, so that modifications
to the code are automatically reflected in your installed virtualenv.
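
After editable installs like the ones above, a quick sanity check can confirm that imports resolve to your checkout rather than a released wheel — a hedged sketch (the ``Context`` import path is taken from the sync NOTE elsewhere in this PR):

.. code:: python

    # Sketch: verify the editable installs point at the source tree.
    import airflow
    from airflow.sdk.definitions.context import Context  # provided by ./task-sdk

    print(airflow.__file__)  # should resolve inside your checkout, not site-packages
    print(Context)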
@@ -39,10 +39,10 @@ Setup your project
alt="Cloning github fork to Pycharm">
</div>

- 3. Configure the source root directories well as for ``task_sdk`` and ``devel-common``.
+ 3. Configure the source root directories well as for ``task-sdk`` and ``devel-common``.
You also have to set "source" and "tests" root directories for each provider you want to develop (!).

- This is important in Airflow 3.0 we split ``task_sdk``, ``devel-common`` and each provider to be separate
+ This is important in Airflow 3.0 we split ``task-sdk``, ``devel-common`` and each provider to be separate
distribution - each with separate ``pyproject.toml`` file, so you need to separately
add "src" and "tests" directories for each provider you develop to be respectively
"source roots" and "test roots".
@@ -57,12 +57,12 @@ Setup your project
alt="Adding Source Root directories to Pycharm">
</div>

- You also need to add ``task_sdk`` sources (and ``devel-common`` in similar way).
+ You also need to add ``task-sdk`` sources (and ``devel-common`` in similar way).

.. raw:: html

<div align="center" style="padding-bottom:10px">
- <img src="images/pycharm_add_task_sdk_sources.png"
+ <img src="images/pycharm_add_task-sdk_sources.png"
alt="Adding Source Root directories to Pycharm">
</div>

@@ -627,7 +627,7 @@ def prepare_airflow_packages(
get_console().print("[success]Successfully prepared Airflow packages")


- TASK_SDK_DIR_PATH = AIRFLOW_SOURCES_ROOT / "task_sdk"
+ TASK_SDK_DIR_PATH = AIRFLOW_SOURCES_ROOT / "task-sdk"
TASK_SDK_DIST_DIR_PATH = TASK_SDK_DIR_PATH / "dist"


@@ -692,7 +692,7 @@ def _build_package_with_docker(package_format: str):
"-e",
"GITHUB_ACTIONS",
"-w",
"/opt/airflow/task_sdk",
"/opt/airflow/task-sdk",
AIRFLOW_BUILD_IMAGE_TAG,
"bash",
"-c",
@@ -707,7 +707,7 @@
DIST_DIR.mkdir(parents=True, exist_ok=True)
get_console().print()
# Copy all files in the dist directory in container to the host dist directory (note '/.' in SRC)
run_command(["docker", "cp", f"{container_id}:/opt/airflow/task_sdk/dist/.", "./dist"], check=True)
run_command(["docker", "cp", f"{container_id}:/opt/airflow/task-sdk/dist/.", "./dist"], check=True)
run_command(["docker", "rm", "--force", container_id], check=False, stdout=DEVNULL, stderr=DEVNULL)

if use_local_hatch:
@@ -154,7 +154,7 @@ def docker_compose_tests(
sys.exit(return_code)


- TEST_PROGRESS_REGEXP = r"tests/.*|providers/.*/tests/.*|task_sdk/tests/.*|.*=====.*"
+ TEST_PROGRESS_REGEXP = r"tests/.*|providers/.*/tests/.*|task-sdk/tests/.*|.*=====.*"
PERCENT_TEST_PROGRESS_REGEXP = r"^tests/.*\[[ \d%]*\].*|^\..*\[[ \d%]*\].*"
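
For illustration, a small sketch of the lines the progress pattern above is meant to match (the sample pytest output lines are invented):

.. code:: python

    import re

    # Same pattern as above, after the task-sdk rename.
    TEST_PROGRESS_REGEXP = r"tests/.*|providers/.*/tests/.*|task-sdk/tests/.*|.*=====.*"

    samples = [
        "task-sdk/tests/definitions/test_dag.py::test_dag_id PASSED",
        "===== 3 passed in 0.12s =====",
    ]
    for line in samples:
        print(bool(re.match(TEST_PROGRESS_REGEXP, line)))  # True, True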


2 changes: 1 addition & 1 deletion dev/breeze/src/airflow_breeze/global_constants.py
@@ -287,7 +287,7 @@ def all_task_sdk_test_packages() -> list[str]:
return sorted(
[
candidate.name
- for candidate in (AIRFLOW_SOURCES_ROOT / "task_sdk" / "tests").iterdir()
+ for candidate in (AIRFLOW_SOURCES_ROOT / "task-sdk" / "tests").iterdir()
if candidate.is_dir() and candidate.name != "__pycache__"
]
)
@@ -92,7 +92,7 @@
("hooks", "/opt/airflow/hooks"),
("logs", "/root/airflow/logs"),
("providers", "/opt/airflow/providers"),
("task_sdk", "/opt/airflow/task_sdk"),
("task-sdk", "/opt/airflow/task-sdk"),
("pyproject.toml", "/opt/airflow/pyproject.toml"),
("scripts", "/opt/airflow/scripts"),
("scripts/docker/entrypoint_ci.sh", "/entrypoint"),
4 changes: 2 additions & 2 deletions dev/breeze/src/airflow_breeze/utils/run_tests.py
@@ -168,7 +168,7 @@ def get_excluded_provider_args(python_version: str) -> list[str]:
"Serialization": [
"tests/serialization",
],
"TaskSDK": ["task_sdk/tests"],
"TaskSDK": ["task-sdk/tests"],
"WWW": [
"tests/www",
],
@@ -200,7 +200,7 @@ def get_excluded_provider_args(python_version: str) -> list[str]:
TEST_GROUP_TO_TEST_FOLDERS: dict[GroupOfTests, list[str]] = {
GroupOfTests.CORE: ["tests"],
GroupOfTests.PROVIDERS: ALL_PROVIDER_TEST_FOLDERS,
- GroupOfTests.TASK_SDK: ["task_sdk/tests"],
+ GroupOfTests.TASK_SDK: ["task-sdk/tests"],
GroupOfTests.HELM: ["helm_tests"],
GroupOfTests.INTEGRATION_CORE: ["tests/integration"],
GroupOfTests.INTEGRATION_PROVIDERS: ALL_PROVIDER_INTEGRATION_TEST_FOLDERS,
22 changes: 11 additions & 11 deletions dev/breeze/src/airflow_breeze/utils/selective_checks.py
@@ -190,7 +190,7 @@ def __hash__(self):
r"^chart",
r"^providers/.*/src/",
r"^providers/.*/docs/",
r"^task_sdk/src/",
r"^task-sdk/src/",
r"^tests/system",
r"^CHANGELOG\.txt",
r"^airflow/config_templates/config\.yml",
@@ -226,8 +226,8 @@ def __hash__(self):
r"^providers/tests/",
r"^providers/.*/src/",
r"^providers/.*/tests/",
r"^task_sdk/src/",
r"^task_sdk/tests/",
r"^task-sdk/src/",
r"^task-sdk/tests/",
r"^tests",
r"^devel-common",
r"^kubernetes_tests",
@@ -246,18 +246,18 @@ def __hash__(self):
r"^devel-common/.*\.py$",
],
FileGroupForCi.TASK_SDK_FILES: [
r"^task_sdk/src/airflow/sdk/.*\.py$",
r"^task_sdk/tests/.*\.py$",
r"^task-sdk/src/airflow/sdk/.*\.py$",
r"^task-sdk/tests/.*\.py$",
],
FileGroupForCi.ASSET_FILES: [
r"^airflow/assets/",
r"^airflow/models/assets/",
r"^task_sdk/src/airflow/sdk/definitions/asset/",
r"^task-sdk/src/airflow/sdk/definitions/asset/",
r"^airflow/datasets/",
],
FileGroupForCi.UNIT_TEST_FILES: [
r"^tests/",
r"^task_sdk/tests/",
r"^task-sdk/tests/",
r"^providers/.*/tests/",
r"^dev/breeze/tests/",
],
@@ -274,8 +274,8 @@ def __hash__(self):
r"^docs/.*",
r"^providers/.*/tests/.*",
r"^tests/dags/test_imports.py",
r"^task_sdk/src/airflow/sdk/.*\.py$",
r"^task_sdk/tests/.*\.py$",
r"^task-sdk/src/airflow/sdk/.*\.py$",
r"^task-sdk/tests/.*\.py$",
]
}
)
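
To make the effect of these pattern updates concrete, a sketch (not the real selective-checks code) of how a changed file would be classified into the Task SDK group:

.. code:: python

    import re

    # Mirrors FileGroupForCi.TASK_SDK_FILES above, post-rename.
    TASK_SDK_FILES = [
        r"^task-sdk/src/airflow/sdk/.*\.py$",
        r"^task-sdk/tests/.*\.py$",
    ]

    changed = "task-sdk/src/airflow/sdk/definitions/dag.py"
    print(any(re.match(p, changed) for p in TASK_SDK_FILES))  # True -> Task SDK CI runs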
@@ -307,8 +307,8 @@ def __hash__(self):
r"^providers/.*/tests/",
],
SelectiveTaskSdkTestType.TASK_SDK: [
r"^task_sdk/src/",
r"^task_sdk/tests/",
r"^task-sdk/src/",
r"^task-sdk/tests/",
],
SelectiveCoreTestType.SERIALIZATION: [
r"^airflow/serialization/",
2 changes: 1 addition & 1 deletion dev/breeze/tests/test_pytest_args_for_test_types.py
@@ -304,7 +304,7 @@ def test_pytest_args_for_missing_provider():
GroupOfTests.TASK_SDK,
"All",
[
"task_sdk/tests",
"task-sdk/tests",
],
),
(
14 changes: 7 additions & 7 deletions dev/breeze/tests/test_selective_checks.py
@@ -492,7 +492,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str):
),
(
pytest.param(
("task_sdk/src/airflow/sdk/random.py",),
("task-sdk/src/airflow/sdk/random.py",),
{
"all-python-versions": "['3.9']",
"all-python-versions-list-as-string": "3.9",
@@ -1688,7 +1688,7 @@ def test_expected_output_push(
(
"airflow/assets/",
"airflow/models/assets/",
"task_sdk/src/airflow/sdk/definitions/asset/",
"task-sdk/src/airflow/sdk/definitions/asset/",
"airflow/datasets/",
),
{
@@ -2455,7 +2455,7 @@ def test_provider_compatibility_checks(labels: tuple[str, ...], expected_outputs
id="Airflow mypy checks on airflow files with model changes.",
),
pytest.param(
("task_sdk/src/airflow/sdk/a_file.py",),
("task-sdk/src/airflow/sdk/a_file.py",),
{
"needs-mypy": "true",
"mypy-checks": "['mypy-providers', 'mypy-task-sdk']",
@@ -2594,7 +2594,7 @@ def test_pr_labels(
id="Caplog is in the git diff Providers",
),
pytest.param(
("task_sdk/tests/definitions/test_dag.py",),
("task-sdk/tests/definitions/test_dag.py",),
(),
GithubEvents.PULL_REQUEST,
id="Caplog is in the git diff TaskSDK",
@@ -2651,7 +2651,7 @@ def test_is_log_mocked_in_the_tests_fail(
id="Caplog is in the git diff Providers",
),
pytest.param(
("task_sdk/tests/definitions/test_dag.py",),
("task-sdk/tests/definitions/test_dag.py",),
(),
GithubEvents.PULL_REQUEST,
id="Caplog is in the git diff TaskSDK",
@@ -2711,7 +2711,7 @@ def test_is_log_mocked_in_the_tests_fail_formatted(
id="Caplog is in the git diff Providers",
),
pytest.param(
("task_sdk/tests/definitions/test_dag.py",),
("task-sdk/tests/definitions/test_dag.py",),
(),
GithubEvents.PULL_REQUEST,
id="Caplog is in the git diff TaskSDK",
@@ -2759,7 +2759,7 @@ def test_is_log_mocked_in_the_tests_not_fail(
id="Caplog is in the git diff Providers",
),
pytest.param(
("task_sdk/tests/definitions/test_dag.py",),
("task-sdk/tests/definitions/test_dag.py",),
(LOG_WITHOUT_MOCK_IN_TESTS_EXCEPTION_LABEL,),
GithubEvents.PULL_REQUEST,
id="Caplog is in the git diff TaskSDK",
@@ -97,4 +97,4 @@ In the following example, failures in any task call the ``task_failure_alert`` f
to be executed in the desired event. Simply pass a list of callback functions to the callback args when defining your DAG/task
callbacks: e.g ``on_failure_callback=[callback_func_1, callback_func_2]``

- Full list of variables available in ``context`` in :doc:`docs <../../templates-ref>` and `code <https://github.com/apache/airflow/blob/main/task_sdk/src/airflow/sdk/definitions/context.py>`_.
+ Full list of variables available in ``context`` in :doc:`docs <../../templates-ref>` and `code <https://github.com/apache/airflow/blob/main/task-sdk/src/airflow/sdk/definitions/context.py>`_.
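
To round out the callback-list example described above, a minimal runnable sketch (the DAG id and callable names are invented; ``EmptyOperator`` and list-valued ``on_failure_callback`` are assumed available in your Airflow version):

.. code:: python

    import datetime

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator


    def notify_oncall(context):
        # `context` carries the template variables listed in templates-ref
        print(f"task {context['task_instance'].task_id} failed")


    def record_failure_metric(context):
        print(f"failure in dag {context['dag'].dag_id}")


    with DAG(
        dag_id="callback_list_example",
        start_date=datetime.datetime(2025, 1, 1),
        schedule=None,
        # A list runs every callback on failure; a single callable also works.
        on_failure_callback=[notify_oncall, record_failure_metric],
    ):
        EmptyOperator(task_id="noop")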
Full list of variables available in ``context`` in :doc:`docs <../../templates-ref>` and `code <https://github.com/apache/airflow/blob/main/task-sdk/src/airflow/sdk/definitions/context.py>`_.