From 92b760a462c40fa3390f634145e6d7723ad9ab23 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Fri, 13 Jun 2025 17:49:35 +0800 Subject: [PATCH 01/30] feat: AIP-90 PoC --- airflow-core/docs/img/airflow_erd.svg | 3224 +++++++++-------- .../execution_api/routes/__init__.py | 3 + .../api_fastapi/execution_api/routes/hitl.py | 125 + .../src/airflow/jobs/triggerer_job_runner.py | 37 +- airflow-core/src/airflow/utils/db.py | 25 +- .../core_api/routes/public/test_plugins.py | 7 +- .../unit/plugins/test_plugins_manager.py | 2 +- airflow-core/tests/unit/utils/test_db.py | 6 + providers/standard/provider.yaml | 6 + providers/standard/pyproject.toml | 3 + .../airflow/providers/standard/alembic.ini | 133 + .../providers/standard/api/__init__.py | 39 + .../airflow/providers/standard/api/client.py | 97 + .../standard/api_fastapi/__init__.py | 16 + .../standard/api_fastapi/core_api/__init__.py | 16 + .../core_api/datamodels/__init__.py | 16 + .../api_fastapi/core_api/datamodels/hitl.py | 75 + .../api_fastapi/core_api/routes/__init__.py | 16 + .../api_fastapi/core_api/routes/hitl.py | 147 + .../api_fastapi/execution_api/__init__.py | 16 + .../execution_api/datamodels/__init__.py | 16 + .../execution_api/datamodels/hitl.py | 59 + .../execution_api/routes/__init__.py | 16 + .../airflow/providers/standard/exceptions.py | 4 + .../standard/execution_time/__init__.py | 16 + .../standard/execution_time/comms.py | 67 + .../providers/standard/execution_time/hitl.py | 83 + .../providers/standard/get_provider_info.py | 8 + .../providers/standard/migrations/README | 1 + .../providers/standard/migrations/__init__.py | 16 + .../providers/standard/migrations/env.py | 126 + .../standard/migrations/script.py.mako | 45 + .../0001_3_0_placeholder_migration.py | 45 + .../standard/migrations/versions/__init__.py | 16 + .../providers/standard/models/__init__.py | 71 + .../airflow/providers/standard/models/db.py | 45 + .../providers/standard/operators/hitl.py | 205 ++ 
.../providers/standard/plugins/__init__.py | 16 + .../providers/standard/plugins/hitl.py | 75 + .../providers/standard/triggers/hitl.py | 113 + .../tests/unit/standard/api/__init__.py | 16 + .../tests/unit/standard/api/test_client.py | 136 + .../unit/standard/api_fastapi/__init__.py | 16 + .../standard/api_fastapi/core_api/__init__.py | 16 + .../core_api/datamodels/__init__.py | 16 + .../core_api/datamodels/test_hitl.py | 16 + .../api_fastapi/core_api/routes/__init__.py | 16 + .../api_fastapi/core_api/routes/test_hitl.py | 16 + .../api_fastapi/execution_api/__init__.py | 16 + .../execution_api/datamodels/__init__.py | 16 + .../execution_api/datamodels/test_hitl.py | 16 + .../execution_api/routes/__init__.py | 16 + .../execution_api/routes/test_hitl.py | 16 + .../unit/standard/execution_time/__init__.py | 16 + .../standard/execution_time/test_comms.py | 16 + .../unit/standard/execution_time/test_hitl.py | 16 + .../unit/standard/migrations/__init__.py | 16 + .../unit/standard/migrations/test_env.py | 16 + .../tests/unit/standard/models/__init__.py | 16 + .../tests/unit/standard/models/test_db.py | 16 + .../unit/standard/operators/test_hitl.py | 16 + .../tests/unit/standard/plugins/__init__.py | 16 + .../tests/unit/standard/plugins/test_hitl.py | 16 + .../tests/unit/standard/triggers/test_hitl.py | 126 + pyproject.toml | 1 + task-sdk/src/airflow/sdk/api/client.py | 12 + .../airflow/sdk/api/datamodels/_generated.py | 54 + .../src/airflow/sdk/execution_time/comms.py | 23 +- .../airflow/sdk/execution_time/supervisor.py | 18 + 69 files changed, 4260 insertions(+), 1514 deletions(-) create mode 100644 airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/alembic.ini create mode 100644 providers/standard/src/airflow/providers/standard/api/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api/client.py create mode 100644 
providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/execution_time/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/execution_time/comms.py create mode 100644 providers/standard/src/airflow/providers/standard/execution_time/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/migrations/README create mode 100644 providers/standard/src/airflow/providers/standard/migrations/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/migrations/env.py create mode 100644 providers/standard/src/airflow/providers/standard/migrations/script.py.mako create mode 100644 providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py create mode 100644 providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py create mode 100644 
providers/standard/src/airflow/providers/standard/models/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/models/db.py create mode 100644 providers/standard/src/airflow/providers/standard/operators/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/plugins/__init__.py create mode 100644 providers/standard/src/airflow/providers/standard/plugins/hitl.py create mode 100644 providers/standard/src/airflow/providers/standard/triggers/hitl.py create mode 100644 providers/standard/tests/unit/standard/api/__init__.py create mode 100644 providers/standard/tests/unit/standard/api/test_client.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py create mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py create mode 100644 providers/standard/tests/unit/standard/execution_time/__init__.py create mode 100644 providers/standard/tests/unit/standard/execution_time/test_comms.py create mode 100644 providers/standard/tests/unit/standard/execution_time/test_hitl.py 
create mode 100644 providers/standard/tests/unit/standard/migrations/__init__.py create mode 100644 providers/standard/tests/unit/standard/migrations/test_env.py create mode 100644 providers/standard/tests/unit/standard/models/__init__.py create mode 100644 providers/standard/tests/unit/standard/models/test_db.py create mode 100644 providers/standard/tests/unit/standard/operators/test_hitl.py create mode 100644 providers/standard/tests/unit/standard/plugins/__init__.py create mode 100644 providers/standard/tests/unit/standard/plugins/test_hitl.py create mode 100644 providers/standard/tests/unit/standard/triggers/test_hitl.py diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg index 5565970e5573f..4877d50b50e19 100644 --- a/airflow-core/docs/img/airflow_erd.svg +++ b/airflow-core/docs/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + dag_priority_parsing_request @@ -247,511 +247,567 @@ [INTEGER] NOT NULL - + +hitl_response + +hitl_response + +ti_id + + [UUID] + NOT NULL + +body + + [TEXT] + +default + + [JSON] + +multiple + + [BOOLEAN] + +options + + [JSON] + NOT NULL + +params + + [JSON] + NOT NULL + +params_input + + [JSON] + NOT NULL + +response_at + + [TIMESTAMP] + +response_content + + [JSON] + +subject + + [TEXT] + NOT NULL + +user_id + + [VARCHAR(128)] + + + slot_pool - -slot_pool - -id - - [INTEGER] - NOT NULL - -description - - [TEXT] - -include_deferred - - [BOOLEAN] - NOT NULL - -pool - - [VARCHAR(256)] - -slots - - [INTEGER] + +slot_pool + +id + + [INTEGER] + NOT NULL + +description + + [TEXT] + +include_deferred + + [BOOLEAN] + NOT NULL + +pool + + [VARCHAR(256)] + +slots + + [INTEGER] - + import_error - -import_error - -id - - [INTEGER] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -filename - - [VARCHAR(1024)] - -stacktrace - - [TEXT] - -timestamp - - [TIMESTAMP] + +import_error + +id + + [INTEGER] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +filename + + [VARCHAR(1024)] + +stacktrace + + [TEXT] + 
+timestamp + + [TIMESTAMP] - + asset_alias - -asset_alias - -id - - [INTEGER] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL + +asset_alias + +id + + [INTEGER] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL - + asset_alias_asset - -asset_alias_asset - -alias_id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL + +asset_alias_asset + +alias_id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_alias_asset_event - -asset_alias_asset_event - -alias_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +asset_alias_asset_event + +alias_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dag_schedule_asset_alias_reference - -dag_schedule_asset_alias_reference - -alias_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_alias_reference + +alias_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset_alias--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 - + asset - -asset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_trigger - -asset_trigger - -asset_id - - 
[INTEGER] - NOT NULL - -trigger_id - - [INTEGER] - NOT NULL + +asset_trigger + +asset_id + + [INTEGER] + NOT NULL + +trigger_id + + [INTEGER] + NOT NULL asset--asset_trigger - -0..N -1 + +0..N +1 - + asset_active - -asset_active - -name - - [VARCHAR(1500)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset_active + +name + + [VARCHAR(1500)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_active - -1 -1 + +1 +1 asset--asset_active - -1 -1 + +1 +1 - + dag_schedule_asset_reference - -dag_schedule_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--dag_schedule_asset_reference - -0..N -1 + +0..N +1 - + task_outlet_asset_reference - -task_outlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_outlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_outlet_asset_reference - -0..N -1 + +0..N +1 - + task_inlet_asset_reference - -task_inlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_inlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL 
asset--task_inlet_asset_reference - -0..N -1 + +0..N +1 - + asset_dag_run_queue - -asset_dag_run_queue - -asset_id - - [INTEGER] - NOT NULL - -target_dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +asset_dag_run_queue + +asset_id + + [INTEGER] + NOT NULL + +target_dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL asset--asset_dag_run_queue - -0..N -1 + +0..N +1 - + asset_event - -asset_event - -id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +asset_event + +id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL asset_event--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dagrun_asset_event - -dagrun_asset_event - -dag_run_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +dagrun_asset_event + +dag_run_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_event--dagrun_asset_event - -0..N -1 + +0..N +1 - + trigger - -trigger - -id - - [INTEGER] - NOT NULL - -classpath - - [VARCHAR(1000)] - NOT NULL - -created_date - - [TIMESTAMP] - NOT NULL - -kwargs - - [TEXT] - NOT NULL - -triggerer_id - - [INTEGER] + +trigger + +id + + [INTEGER] + NOT NULL + +classpath + + [VARCHAR(1000)] + NOT NULL + +created_date + + [TIMESTAMP] + NOT NULL + +kwargs + + [TEXT] + NOT NULL + +triggerer_id + + [INTEGER] trigger--asset_trigger - -0..N -1 + +0..N +1 - + task_instance +<<<<<<< HEAD task_instance @@ -911,1363 +967,1529 @@ updated_at [TIMESTAMP] +======= + +task_instance + +id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + 
+custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +last_heartbeat_at + + [TIMESTAMP] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] +>>>>>>> fc78b17140f (feat: AIP-90 PoC) trigger--task_instance - -0..N -{0,1} + +0..N +{0,1} - + task_map - -task_map - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -keys - - [JSONB] - -length - - [INTEGER] - NOT NULL + +task_map + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +keys + + [JSONB] + +length + + [INTEGER] + NOT NULL task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 - + task_reschedule - -task_reschedule - -id - - [INTEGER] - NOT NULL - 
-duration - - [INTEGER] - NOT NULL - -end_date - - [TIMESTAMP] - NOT NULL - -reschedule_date - - [TIMESTAMP] - NOT NULL - -start_date - - [TIMESTAMP] - NOT NULL - -ti_id - - [UUID] - NOT NULL + +task_reschedule + +id + + [INTEGER] + NOT NULL + +duration + + [INTEGER] + NOT NULL + +end_date + + [TIMESTAMP] + NOT NULL + +reschedule_date + + [TIMESTAMP] + NOT NULL + +start_date + + [TIMESTAMP] + NOT NULL + +ti_id + + [UUID] + NOT NULL task_instance--task_reschedule - -0..N -1 + +0..N +1 - + xcom - -xcom - -dag_run_id - - [INTEGER] - NOT NULL - -key - - [VARCHAR(512)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL - -value - - [JSONB] + +xcom + +dag_run_id + + [INTEGER] + NOT NULL + +key + + [VARCHAR(512)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL + +value + + [JSONB] task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 - + task_instance_note - -task_instance_note - -ti_id - - [UUID] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +task_instance_note + +ti_id + + [UUID] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] task_instance--task_instance_note - -1 -1 + +1 +1 - + task_instance_history - -task_instance_history - -task_instance_id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - 
[DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +task_instance_id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date 
+ + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 - + rendered_task_instance_fields - -rendered_task_instance_fields - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -k8s_pod_yaml - - [JSON] - -rendered_fields - - [JSON] - NOT NULL + +rendered_task_instance_fields + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +k8s_pod_yaml + + [JSON] + +rendered_fields + + [JSON] + NOT NULL task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 - + dag_bundle - -dag_bundle - -name - - [VARCHAR(250)] - NOT NULL - -active - - [BOOLEAN] - -last_refreshed - - [TIMESTAMP] - -version - - [VARCHAR(200)] + +dag_bundle + +name + + [VARCHAR(250)] + NOT NULL + +active + + [BOOLEAN] + +last_refreshed + + [TIMESTAMP] + +version + + [VARCHAR(200)] - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -asset_expression - - [JSON] - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(200)] - -dag_display_name - - [VARCHAR(2000)] - -deadline - - [JSON] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] 
- NOT NULL - -is_paused - - [BOOLEAN] - -is_stale - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - [TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -relative_fileloc - - [VARCHAR(2000)] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +asset_expression + + [JSON] + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(200)] + +dag_display_name + + [VARCHAR(2000)] + +deadline + + [JSON] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_paused + + [BOOLEAN] + +is_stale + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +relative_fileloc + + [VARCHAR(2000)] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] dag_bundle--dag - -0..N -{0,1} + +0..N +{0,1} dag--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 dag--dag_schedule_asset_reference - -0..N -1 + +0..N +1 dag--task_outlet_asset_reference - -0..N -1 + +0..N +1 dag--task_inlet_asset_reference - -0..N -1 + +0..N +1 dag--asset_dag_run_queue - -0..N -1 + +0..N +1 - + dag_schedule_asset_name_reference - -dag_schedule_asset_name_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(1500)] - 
NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_name_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_name_reference - -0..N -1 + +0..N +1 - + dag_schedule_asset_uri_reference - -dag_schedule_asset_uri_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_uri_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_uri_reference - -0..N -1 + +0..N +1 - + dag_version - -dag_version - -id - - [UUID] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(250)] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -version_number - - [INTEGER] - NOT NULL + +dag_version + +id + + [UUID] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(250)] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +version_number + + [INTEGER] + NOT NULL dag--dag_version - -0..N -1 + +0..N +1 - + dag_tag - -dag_tag - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +dag_tag + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes - -dag_owner_attributes - -dag_id - - [VARCHAR(250)] - NOT NULL - -owner - - [VARCHAR(500)] - NOT NULL - -link - - [VARCHAR(500)] - NOT NULL + +dag_owner_attributes + +dag_id + + [VARCHAR(250)] + NOT NULL + +owner + + [VARCHAR(500)] + NOT NULL + +link + + [VARCHAR(500)] + NOT NULL dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning - -dag_warning - -dag_id - - [VARCHAR(250)] - NOT NULL - -warning_type - - [VARCHAR(50)] - NOT NULL - -message 
- - [TEXT] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL + +dag_warning + +dag_id + + [VARCHAR(250)] + NOT NULL + +warning_type + + [VARCHAR(50)] + NOT NULL + +message + + [TEXT] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL dag--dag_warning - -0..N -1 + +0..N +1 - + dag_favorite - -dag_favorite - -dag_id - - [VARCHAR(250)] - NOT NULL - -user_id - - [VARCHAR(250)] - NOT NULL + +dag_favorite + +dag_id + + [VARCHAR(250)] + NOT NULL + +user_id + + [VARCHAR(250)] + NOT NULL dag--dag_favorite - -0..N -1 + +0..N +1 - + deadline - -deadline - -id - - [UUID] - NOT NULL - -callback - - [VARCHAR(500)] - NOT NULL - -callback_kwargs - - [JSON] - -dag_id - - [VARCHAR(250)] - -dagrun_id - - [INTEGER] - -deadline_time - - [TIMESTAMP] - NOT NULL + +deadline + +id + + [UUID] + NOT NULL + +callback + + [VARCHAR(500)] + NOT NULL + +callback_kwargs + + [JSON] + +dag_id + + [VARCHAR(250)] + +dagrun_id + + [INTEGER] + +deadline_time + + [TIMESTAMP] + NOT NULL dag--deadline - -0..N -{0,1} + +0..N +{0,1} dag_version--task_instance +<<<<<<< HEAD 0..N 1 +======= + +0..N +{0,1} +>>>>>>> fc78b17140f (feat: AIP-90 PoC) - + dag_run - -dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - -bundle_version - - [VARCHAR(250)] - -clear_number - - [INTEGER] - NOT NULL - -conf - - [JSONB] - -context_carrier - - [JSONB] - -created_dag_version_id - - [UUID] - -creating_job_id - - [INTEGER] - -dag_id - - [VARCHAR(250)] - NOT NULL - -data_interval_end - - [TIMESTAMP] - -data_interval_start - - [TIMESTAMP] - -end_date - - [TIMESTAMP] - -last_scheduling_decision - - [TIMESTAMP] - -log_template_id - - [INTEGER] - -logical_date - - [TIMESTAMP] - -queued_at - - [TIMESTAMP] - -run_after - - [TIMESTAMP] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -run_type - - [VARCHAR(50)] - NOT NULL - -scheduled_by_job_id - - [INTEGER] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(50)] - -triggered_by - - [VARCHAR(50)] - -triggering_user_name 
- - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] + +dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + +bundle_version + + [VARCHAR(250)] + +clear_number + + [INTEGER] + NOT NULL + +conf + + [JSONB] + +context_carrier + + [JSONB] + +created_dag_version_id + + [UUID] + +creating_job_id + + [INTEGER] + +dag_id + + [VARCHAR(250)] + NOT NULL + +data_interval_end + + [TIMESTAMP] + +data_interval_start + + [TIMESTAMP] + +end_date + + [TIMESTAMP] + +last_scheduling_decision + + [TIMESTAMP] + +log_template_id + + [INTEGER] + +logical_date + + [TIMESTAMP] + +queued_at + + [TIMESTAMP] + +run_after + + [TIMESTAMP] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +run_type + + [VARCHAR(50)] + NOT NULL + +scheduled_by_job_id + + [INTEGER] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(50)] + +triggered_by + + [VARCHAR(50)] + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] dag_version--dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_code - -dag_code - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -fileloc - - [VARCHAR(2000)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -source_code - - [TEXT] - NOT NULL - -source_code_hash - - [VARCHAR(32)] - NOT NULL + +dag_code + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +fileloc + + [VARCHAR(2000)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +source_code + + [TEXT] + NOT NULL + +source_code_hash + + [VARCHAR(32)] + NOT NULL dag_version--dag_code - -0..N -1 + +0..N +1 - + serialized_dag - -serialized_dag - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_hash - - [VARCHAR(32)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -data - - [JSON] - 
-data_compressed - - [BYTEA] - -last_updated - - [TIMESTAMP] - NOT NULL + +serialized_dag + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_hash + + [VARCHAR(32)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +data + + [JSON] + +data_compressed + + [BYTEA] + +last_updated + + [TIMESTAMP] + NOT NULL dag_version--serialized_dag - -0..N -1 + +0..N +1 dag_run--dagrun_asset_event - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--deadline - -0..N -{0,1} + +0..N +{0,1} - + backfill_dag_run - -backfill_dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - NOT NULL - -dag_run_id - - [INTEGER] - -exception_reason - - [VARCHAR(250)] - -logical_date - - [TIMESTAMP] - NOT NULL - -sort_ordinal - - [INTEGER] - NOT NULL + +backfill_dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + NOT NULL + +dag_run_id + + [INTEGER] + +exception_reason + + [VARCHAR(250)] + +logical_date + + [TIMESTAMP] + NOT NULL + +sort_ordinal + + [INTEGER] + NOT NULL dag_run--backfill_dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_run_note - -dag_run_note - -dag_run_id - - [INTEGER] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +dag_run_note + +dag_run_id + + [INTEGER] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] dag_run--dag_run_note - -1 -1 + +1 +1 - + log_template - -log_template - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -elasticsearch_id - - [TEXT] - NOT NULL - -filename - - [TEXT] - NOT NULL + +log_template + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +elasticsearch_id + + [TEXT] + NOT NULL + +filename + + [TEXT] + NOT NULL log_template--dag_run - -0..N 
-{0,1} + +0..N +{0,1} - + backfill - -backfill - -id - - [INTEGER] - NOT NULL - -completed_at - - [TIMESTAMP] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_run_conf - - [JSON] - NOT NULL - -from_date - - [TIMESTAMP] - NOT NULL - -is_paused - - [BOOLEAN] - -max_active_runs - - [INTEGER] - NOT NULL - -reprocess_behavior - - [VARCHAR(250)] - NOT NULL - -to_date - - [TIMESTAMP] - NOT NULL - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] - NOT NULL + +backfill + +id + + [INTEGER] + NOT NULL + +completed_at + + [TIMESTAMP] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_run_conf + + [JSON] + NOT NULL + +from_date + + [TIMESTAMP] + NOT NULL + +is_paused + + [BOOLEAN] + +max_active_runs + + [INTEGER] + NOT NULL + +reprocess_behavior + + [VARCHAR(250)] + NOT NULL + +to_date + + [TIMESTAMP] + NOT NULL + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] + NOT NULL backfill--dag_run - -0..N -{0,1} + +0..N +{0,1} backfill--backfill_dag_run - -0..N -1 + +0..N +1 - + alembic_version - -alembic_version - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py index 164c3f0942d1f..383fc48ba8ee9 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py @@ -26,6 +26,7 @@ connections, dag_runs, health, + hitl, task_instances, task_reschedules, variables, @@ -48,5 +49,7 @@ ) authenticated_router.include_router(variables.router, prefix="/variables", tags=["Variables"]) authenticated_router.include_router(xcoms.router, prefix="/xcoms", tags=["XComs"]) +authenticated_router.include_router(hitl.router, prefix="/hitl-responses", tags=["Human in the Loop"]) + 
execution_api_router.include_router(authenticated_router) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py new file mode 100644 index 0000000000000..3bd7e67558324 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -0,0 +1,125 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime, timezone +from uuid import UUID + +import structlog +from fastapi import APIRouter, HTTPException, status +from sqlalchemy import select + +from airflow.api_fastapi.common.db.common import SessionDep +from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import ( + HITLInputRequestResponse, +) +from airflow.providers.standard.execution_time.comms import ( + CreateHITLResponsePayload, + HITLResponseContentDetail, + UpdateHITLResponse, +) +from airflow.providers.standard.models import HITLResponseModel + +router = APIRouter() + +log = structlog.get_logger(__name__) + + +@router.post( + "/{task_instance_id}", + status_code=status.HTTP_201_CREATED, +) +def add_hitl_input_request( + task_instance_id: UUID, + payload: CreateHITLResponsePayload, + session: SessionDep, +) -> HITLInputRequestResponse: + """Create a Human-in-the-loop Input Request for a specific Task Instance.""" + ti_id_str = str(task_instance_id) + hitl_response_model = session.scalar( + select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) + ) + if hitl_response_model: + raise HTTPException( + status.HTTP_409_CONFLICT, + f"Human-in-the-loop Input Request for Task Instance with id {ti_id_str} already exists.", + ) + + hitl_input_request = HITLResponseModel( + ti_id=ti_id_str, + options=payload.options, + subject=payload.subject, + body=payload.body, + default=payload.default, + multiple=payload.multiple, + params=payload.params, + ) + session.add(hitl_input_request) + session.commit() + return HITLInputRequestResponse.model_validate(hitl_input_request) + + +@router.patch("/{task_instance_id}") +def update_hitl_response( + task_instance_id: UUID, + payload: UpdateHITLResponse, + session: SessionDep, +) -> HITLResponseContentDetail: + """Update the Human-in-the-loop Response for a specific Task Instance.""" + ti_id_str = str(task_instance_id) + hitl_response_model = session.execute( +
select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) + ).scalar() + if hitl_response_model.response_received: + raise HTTPException( + status.HTTP_409_CONFLICT, + f"Human-in-the-loop Response Content for Task Instance with id {ti_id_str} already exists.", + ) + + hitl_response_model.user_id = "fallback to default" + hitl_response_model.response_content = payload.response_content + hitl_response_model.params_input = payload.params_input + hitl_response_model.response_at = datetime.now(timezone.utc) + session.add(hitl_response_model) + session.commit() + return HITLResponseContentDetail( + response_received=hitl_response_model.response_received, + response_at=hitl_response_model.response_at, + user_id=hitl_response_model.user_id, + response_content=hitl_response_model.response_content, + ) + + +@router.get( + "/{task_instance_id}", + status_code=status.HTTP_200_OK, +) +def get_hitl_response( + task_instance_id: UUID, + session: SessionDep, +) -> HITLResponseContentDetail: + """Get Human-in-the-loop Response for a specific Task Instance.""" + ti_id_str = str(task_instance_id) + hitl_response_model = session.execute( + select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) + ).scalar() + return HITLResponseContentDetail( + response_received=hitl_response_model.response_received, + response_at=hitl_response_model.response_at, + user_id=hitl_response_model.user_id, + response_content=hitl_response_model.response_content, + ) diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index fd02baecb7176..f91f326b1dd80 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -43,6 +43,7 @@ from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import perform_heartbeat from airflow.models.trigger import Trigger +from airflow.providers.standard.api.client import UpdateHITLResponse from 
airflow.sdk.execution_time.comms import ( CommsDecoder, ConnectionResult, @@ -72,6 +73,19 @@ from airflow.utils.module_loading import import_string from airflow.utils.session import provide_session +# TODO: Remove this block once we can make the execution API pluggable. +try: + from airflow.providers.standard.execution_time.comms import ( + GetHITLResponseContentDetail, + HITLResponseContentDetailResult, + UpdateHITLResponse, + ) +except ModuleNotFoundError: + GetHITLResponseContentDetail = object # type: ignore[misc, assignment] + UpdateHITLResponse = object # type: ignore[misc, assignment] + HITLResponseContentDetailResult = object # type: ignore[misc, assignment] + + if TYPE_CHECKING: from sqlalchemy.orm import Session from structlog.typing import FilteringBoundLogger, WrappedLogger @@ -219,6 +233,7 @@ class TriggerStateSync(BaseModel): | DRCount | TICount | TaskStatesResult + | HITLResponseContentDetailResult | ErrorResponse, Field(discriminator="type"), ] @@ -236,7 +251,9 @@ class TriggerStateSync(BaseModel): | GetTICount | GetTaskStates | GetDagRunState - | GetDRCount, + | GetDRCount + | GetHITLResponseContentDetail + | UpdateHITLResponse, Field(discriminator="type"), ] """ @@ -448,6 +465,24 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, r resp = TaskStatesResult.from_api_response(run_id_task_state_map) else: resp = run_id_task_state_map + # TODO: Remove this block once we can make the execution API pluggable. + elif issubclass(UpdateHITLResponse, BaseModel) and isinstance(msg, UpdateHITLResponse): + if TYPE_CHECKING: + assert HITLResponseContentDetailResult is not None + api_resp = self.client.hitl.update_response( + ti_id=msg.ti_id, + response_content=msg.response_content, + params_input=msg.params_input, + ) + resp = HITLResponseContentDetailResult.from_api_response(response=api_resp) + # TODO: Remove this block once we can make the execution API pluggable. 
+ elif issubclass(GetHITLResponseContentDetail, BaseModel) and isinstance( + msg, GetHITLResponseContentDetail + ): + if TYPE_CHECKING: + assert HITLResponseContentDetailResult is not None + api_resp = self.client.hitl.get_response_content_detail(ti_id=msg.ti_id) + resp = HITLResponseContentDetailResult.from_api_response(response=api_resp) else: raise ValueError(f"Unknown message type {type(msg)}") diff --git a/airflow-core/src/airflow/utils/db.py b/airflow-core/src/airflow/utils/db.py index 8666deac458d4..da281fd201810 100644 --- a/airflow-core/src/airflow/utils/db.py +++ b/airflow-core/src/airflow/utils/db.py @@ -20,6 +20,7 @@ import collections.abc import contextlib import enum +import importlib import itertools import json import logging @@ -1058,14 +1059,24 @@ def downgrade(*, to_revision, from_revision=None, show_sql_only=False, session: if _revision_greater(config, _REVISION_HEADS_MAP["2.10.3"], to_revision): unitest_mode = conf.getboolean("core", "unit_test_mode") if unitest_mode: - try: - from airflow.providers.fab.auth_manager.models.db import FABDBManager + from packaging.version import Version + + from airflow import __version__ + + external_db_managers = [("airflow.providers.fab.auth_manager.models.db", "FABDBManager")] + if Version(__version__) >= Version("3.1.0"): + external_db_managers.append(("airflow.providers.standard.models.db", "HITLDBManager")) + + for module_path, cls_name in external_db_managers: + try: + manager_module = importlib.import_module(module_path) + manager_obj = getattr(manager_module, cls_name) + dbm = manager_obj(session) + dbm.initdb() + except ImportError: + log.warning("Import error occurred while importing %s. Skipping the check.", cls_name) + return - dbm = FABDBManager(session) - dbm.initdb() - except ImportError: - log.warning("Import error occurred while importing FABDBManager. 
Skipping the check.") - return if not inspect(settings.engine).has_table("ab_user") and not unitest_mode: raise AirflowException( "Downgrade to revision less than 3.0.0 requires that `ab_user` table is present. " diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py index 2b84d441b3f96..788ac22d85a2b 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py @@ -33,7 +33,7 @@ class TestGetPlugins: # Filters ( {}, - 13, + 14, [ "MetadataCollectionPlugin", "OpenLineageProviderPlugin", @@ -46,16 +46,17 @@ class TestGetPlugins: "plugin-c", "postload", "priority_weight_strategy_plugin", + "standard_hitl", "test_plugin", "workday_timetable_plugin", ], ), ( {"limit": 3, "offset": 2}, - 13, + 14, ["databricks_workflow", "decreasing_priority_weight_strategy_plugin", "edge_executor"], ), - ({"limit": 1}, 13, ["MetadataCollectionPlugin"]), + ({"limit": 1}, 14, ["MetadataCollectionPlugin"]), ], ) def test_should_respond_200( diff --git a/airflow-core/tests/unit/plugins/test_plugins_manager.py b/airflow-core/tests/unit/plugins/test_plugins_manager.py index ae65bb2e79012..b145c716e2f19 100644 --- a/airflow-core/tests/unit/plugins/test_plugins_manager.py +++ b/airflow-core/tests/unit/plugins/test_plugins_manager.py @@ -351,7 +351,7 @@ def test_does_not_double_import_entrypoint_provider_plugins(self): assert len(plugins_manager.plugins) == 0 plugins_manager.load_entrypoint_plugins() plugins_manager.load_providers_plugins() - assert len(plugins_manager.plugins) == 4 + assert len(plugins_manager.plugins) == 5 class TestPluginsDirectorySource: diff --git a/airflow-core/tests/unit/utils/test_db.py b/airflow-core/tests/unit/utils/test_db.py index cf69275025330..51d722b4c2e81 100644 --- a/airflow-core/tests/unit/utils/test_db.py +++ 
b/airflow-core/tests/unit/utils/test_db.py @@ -37,6 +37,7 @@ from airflow.exceptions import AirflowException from airflow.models import Base as airflow_base from airflow.providers.fab.auth_manager.models.db import FABDBManager +from airflow.providers.standard.models.db import HITLDBManager from airflow.settings import engine from airflow.utils.db import ( _REVISION_HEADS_MAP, @@ -74,6 +75,9 @@ def test_database_schema_and_sqlalchemy_model_are_in_sync(self): # test FAB models for table_name, table in FABDBManager.metadata.tables.items(): all_meta_data._add_table(table_name, table.schema, table) + # test Human-in-the-loop models + for table_name, table in HITLDBManager.metadata.tables.items(): + all_meta_data._add_table(table_name, table.schema, table) # create diff between database schema and SQLAlchemy model mctx = MigrationContext.configure( engine.connect(), @@ -99,6 +103,8 @@ def test_database_schema_and_sqlalchemy_model_are_in_sync(self): lambda t: (t[0] == "remove_table" and t[1].name == "sqlite_sequence"), # fab version table lambda t: (t[0] == "remove_table" and t[1].name == "alembic_version_fab"), + # hitl version table + lambda t: (t[0] == "remove_table" and t[1].name == "alembic_version_hitl"), # Ignore _xcom_archive table lambda t: (t[0] == "remove_table" and t[1].name == "_xcom_archive"), ] diff --git a/providers/standard/provider.yaml b/providers/standard/provider.yaml index f932014c94073..b0f083674f16b 100644 --- a/providers/standard/provider.yaml +++ b/providers/standard/provider.yaml @@ -69,6 +69,7 @@ operators: - airflow.providers.standard.operators.latest_only - airflow.providers.standard.operators.smooth - airflow.providers.standard.operators.branch + - airflow.providers.standard.operators.hitl sensors: - integration-name: Standard python-modules: @@ -93,6 +94,11 @@ triggers: - airflow.providers.standard.triggers.external_task - airflow.providers.standard.triggers.file - airflow.providers.standard.triggers.temporal + - 
airflow.providers.standard.triggers.hitl + +plugins: + - name: standard_hitl + plugin-class: airflow.providers.standard.plugins.hitl.HumanInTheLoopPlugin extra-links: - airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink diff --git a/providers/standard/pyproject.toml b/providers/standard/pyproject.toml index 6d73aaed13329..391a7dffdc6a9 100644 --- a/providers/standard/pyproject.toml +++ b/providers/standard/pyproject.toml @@ -105,5 +105,8 @@ apache-airflow-providers-standard = {workspace = true} [project.entry-points."apache_airflow_provider"] provider_info = "airflow.providers.standard.get_provider_info:get_provider_info" +[project.entry-points."airflow.plugins"] +standard_hitl = "airflow.providers.standard.plugins.hitl:HumanInTheLoopPlugin" + [tool.flit.module] name = "airflow.providers.standard" diff --git a/providers/standard/src/airflow/providers/standard/alembic.ini b/providers/standard/src/airflow/providers/standard/alembic.ini new file mode 100644 index 0000000000000..75d42ee16d3b9 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/alembic.ini @@ -0,0 +1,133 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = %(here)s/migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = scheme://localhost/airflow + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/providers/standard/src/airflow/providers/standard/api/__init__.py b/providers/standard/src/airflow/providers/standard/api/__init__.py new file mode 100644 
index 0000000000000..920412ab9cba2 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api/__init__.py @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE +# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES. 
+# +# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE +# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY +# +from __future__ import annotations + +import packaging.version + +from airflow import __version__ as airflow_version + +__all__ = ["__version__"] + +__version__ = "1.3.0" + +if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( + "2.10.0" +): + raise RuntimeError( + f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.10.0+" + ) diff --git a/providers/standard/src/airflow/providers/standard/api/client.py b/providers/standard/src/airflow/providers/standard/api/client.py new file mode 100644 index 0000000000000..fae8aacfa61cc --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api/client.py @@ -0,0 +1,97 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import uuid +from collections.abc import MutableMapping +from typing import TYPE_CHECKING + +from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import ( + HITLResponseContentDetail, +) +from airflow.providers.standard.execution_time.comms import ( + CreateHITLResponsePayload, + HITLInputRequestResponseResult, + UpdateHITLResponse, +) + +if TYPE_CHECKING: + from airflow.sdk.api.client import Client + + +class HITLOperations: + """ + Operations related to Human in the loop. Require Airflow 3.1+. + + :meta: private + """ + + __slots__ = ("client",) + + def __init__(self, client: Client) -> None: + self.client = client + + def add_response( + self, + *, + ti_id: uuid.UUID, + options: list[str], + subject: str, + body: str | None = None, + default: list[str] | None = None, + multiple: bool = False, + params: MutableMapping | None = None, + ) -> HITLInputRequestResponseResult: + """Add a Human-in-the-loop response that waits for human response for a specific Task Instance.""" + payload = CreateHITLResponsePayload( + ti_id=ti_id, + options=options, + subject=subject, + body=body, + default=default, + multiple=multiple, + params=params, + ) + resp = self.client.post( + f"/hitl-responses/{ti_id}", + content=payload.model_dump_json(), + ) + return HITLInputRequestResponseResult.model_validate_json(resp.read()) + + def update_response( + self, + *, + ti_id: uuid.UUID, + response_content: str, + params_input: MutableMapping | None = None, + ) -> HITLResponseContentDetail: + """Update an existing Human-in-the-loop response.""" + payload = UpdateHITLResponse( + ti_id=ti_id, + response_content=response_content, + params_input=params_input, + ) + resp = self.client.patch( + f"/hitl-responses/{ti_id}", + content=payload.model_dump_json(), + ) + return HITLResponseContentDetail.model_validate_json(resp.read()) + + def get_response_content_detail(self, ti_id: uuid.UUID) -> HITLResponseContentDetail: + """Get content part 
of a Human-in-the-loop response for a specific Task Instance.""" + resp = self.client.get(f"/hitl-responses/{ti_id}") + return HITLResponseContentDetail.model_validate_json(resp.read()) diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/hitl.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/hitl.py new file mode 100644 index 0000000000000..9ab104436aa89 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/hitl.py @@ -0,0 +1,75 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from collections.abc import MutableMapping +from datetime import datetime + +from pydantic import field_validator + +from airflow.api_fastapi.core_api.base import BaseModel + + +class UpdateHITLResponsePayload(BaseModel): + """Schema for updating the content of a Human-in-the-loop response.""" + + response_content: str + + +class HITLResponseContentDetail(BaseModel): + """Response of updating a Human-in-the-loop response.""" + + response_content: str + response_at: datetime + user_id: str + + +class HITLResponseDetail(BaseModel): + """Schema for Human-in-the-loop response.""" + + ti_id: str + + # Input Request + options: list[str] + subject: str + body: str | None = None + default: list[str] | None = None + multiple: bool = False + params: MutableMapping | None = None + + # Response Content Detail + response_at: datetime | None = None + user_id: str | None = None + response_content: str | None = None + params_input: MutableMapping | None = None + + response_received: bool = False + + @field_validator("params", mode="before") + @classmethod + def get_params(cls, params: MutableMapping | None) -> dict | None: + """Convert params attribute to dict representation.""" + if params is None: + return None + return {k: v.dump() for k, v in params.items()} + + +class HITLResponseDetailCollection(BaseModel): + """Schema for a collection of Human-in-the-loop responses.""" + + hitl_responses: list[HITLResponseDetail] + total_entries: int diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py new file mode 100644 index 0000000000000..2d96915fd6075 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py @@ -0,0 +1,147 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from uuid import UUID + +import structlog +from fastapi import Depends, HTTPException, status +from sqlalchemy import select + +from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.db.common import SessionDep, paginated_select +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag +from airflow.models import HITLResponseModel +from airflow.models.taskinstance import TaskInstance as TI +from airflow.providers.standard.api_fastapi.core_api.datamodels.hitl import ( + HITLResponseContentDetail, + HITLResponseDetail, + HITLResponseDetailCollection, + UpdateHITLResponsePayload, +) +from airflow.providers.standard.models import HITLResponseModel +from airflow.utils import timezone + +hitl_router = AirflowRouter(tags=["HumanInTheLoop"]) + +log = structlog.get_logger(__name__) + + +@hitl_router.patch( + "/{task_instance_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[ + Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), + ], +) +def update_hitl_response( + task_instance_id: UUID, + update_hitl_response_payload: UpdateHITLResponsePayload, + user: GetUserDep, + session: SessionDep, +) -> HITLResponseContentDetail: + """Update a Human-in-the-loop response.""" + ti_id_str = str(task_instance_id) + hitl_response_model = session.scalar( + select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) + ) + if not hitl_response_model: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"Human-in-the-loop Response does not exist for Task Instance with id {ti_id_str}", + ) + + if hitl_response_model.response_received: + raise HTTPException( + 
status.HTTP_409_CONFLICT, + f"Human-in-the-loop Response has already been updated for Task Instance with id {ti_id_str} " + "and is not allowed to write again.", + ) + + hitl_response_model.response_content = update_hitl_response_payload.response_content + hitl_response_model.user_id = user.get_id() + hitl_response_model.response_at = timezone.utcnow() + session.add(hitl_response_model) + session.commit() + return HITLResponseContentDetail.model_validate(hitl_response_model) + + +@hitl_router.get( + "/{task_instance_id}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def get_hitl_response( + task_instance_id: UUID, + session: SessionDep, +) -> HITLResponseDetail: + """Get a Human-in-the-loop Response of a specific task instance.""" + ti_id_str = str(task_instance_id) + hitl_response_model = session.scalar( + select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) + ) + if not hitl_response_model: + log.error("Human-in-the-loop response not found") + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail={ + "reason": "not_found", + "message": "Human-in-the-loop response not found", + }, + ) + return HITLResponseDetail.model_validate(hitl_response_model) + + +@hitl_router.get( + "/", + status_code=status.HTTP_200_OK, + dependencies=[ + Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), + ], +) +def get_hitl_responses( + readable_ti_filter: ReadableTIFilterDep, + session: SessionDep, +) -> HITLResponseDetailCollection: + """Get Human-in-the-loop Responses.""" + query = select(HITLResponseModel).join( + TI, + HITLResponseModel.ti_id == TI.id, + ) + hitl_response_select, total_entries = paginated_select( + statement=query, + filters=[readable_ti_filter], + session=session, + ) + 
hitl_responses = session.scalars(hitl_response_select) + return HITLResponseDetailCollection( + hitl_responses=hitl_responses, + total_entries=total_entries, + ) diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/hitl.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/hitl.py new file mode 100644 index 0000000000000..a1e74c812a9c3 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/hitl.py @@ -0,0 +1,59 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from collections.abc import MutableMapping +from datetime import datetime +from uuid import UUID + +from airflow.api_fastapi.core_api.base import BaseModel + + +class HITLInputRequestResponse(BaseModel): + """Schema for the input request part of a Human-in-the-loop Response for a specific task instance.""" + + ti_id: UUID + options: list[str] + subject: str + body: str | None = None + default: list[str] | None = None + multiple: bool = False + params: MutableMapping | None = None + + +class GetHITLResponseContentDetailPayload(BaseModel): + """Schema for getting a Human-in-the-loop response content detail for a specific task instance.""" + + ti_id: UUID + + +class UpdateHITLResponsePayload(BaseModel): + """Schema for writing a Human-in-the-loop response content detail for a specific task instance.""" + + ti_id: UUID + response_content: str + params_input: MutableMapping | None = None + + +class HITLResponseContentDetail(BaseModel): + """Schema for Human-in-the-loop response content detail for a specific task instance.""" + + response_received: bool + response_at: datetime | None + user_id: str | None + response_content: str | None + params_input: MutableMapping | None = None diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
class HITLTriggerEventError(AirflowException):
    """Raised when a TriggerEvent payload contains an error."""
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/execution_time/comms.py b/providers/standard/src/airflow/providers/standard/execution_time/comms.py new file mode 100644 index 0000000000000..95e56dcb0ee51 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/execution_time/comms.py @@ -0,0 +1,67 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from typing import Literal + +from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import ( + GetHITLResponseContentDetailPayload, + HITLInputRequestResponse, + HITLResponseContentDetail, + UpdateHITLResponsePayload, +) + + +class CreateHITLResponsePayload(HITLInputRequestResponse): + """Add the input request part of a Human-in-the-loop response.""" + + type: Literal["CreateHITLResponsePayload"] = "CreateHITLResponsePayload" + + +class HITLInputRequestResponseResult(HITLInputRequestResponse): + """Response to CreateHITLResponsePayload request.""" + + type: Literal["HITLInputRequestResponseResult"] = "HITLInputRequestResponseResult" + + +class GetHITLResponseContentDetail(GetHITLResponseContentDetailPayload): + """Get the response content part of a Human-in-the-loop response.""" + + type: Literal["GetHITLResponseContentDetail"] = "GetHITLResponseContentDetail" + + +class UpdateHITLResponse(UpdateHITLResponsePayload): + """Update the response content part of an existing Human-in-the-loop response.""" + + type: Literal["UpdateHITLResponse"] = "UpdateHITLResponse" + + +class HITLResponseContentDetailResult(HITLResponseContentDetail): + """Response to GetHITLResponseContentDetail request.""" + + type: Literal["HITLResponseContentDetailResult"] = "HITLResponseContentDetailResult" + + @classmethod + def from_api_response(cls, response: HITLResponseContentDetail) -> HITLResponseContentDetailResult: + """ + Create result class from API Response. + + API Response is autogenerated from the API schema, so we need to convert it to Result + for communication between the Supervisor and the task process since it needs a + discriminator field. 
+ """ + return cls(**response.model_dump(exclude_defaults=True), type="HITLResponseContentDetailResult") diff --git a/providers/standard/src/airflow/providers/standard/execution_time/hitl.py b/providers/standard/src/airflow/providers/standard/execution_time/hitl.py new file mode 100644 index 0000000000000..f7c45cf2ee2d1 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/execution_time/hitl.py @@ -0,0 +1,83 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from collections.abc import MutableMapping +from typing import TYPE_CHECKING +from uuid import UUID + +from airflow.providers.standard.execution_time.comms import ( + CreateHITLResponsePayload, + GetHITLResponseContentDetail, + UpdateHITLResponse, +) + +if TYPE_CHECKING: + from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail + + +def add_hitl_input_request( + ti_id: UUID, + options: list[str], + subject: str, + body: str | None = None, + default: list[str] | None = None, + multiple: bool = False, + params: MutableMapping | None = None, +) -> None: + from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS + + SUPERVISOR_COMMS.send( + msg=CreateHITLResponsePayload( + ti_id=ti_id, + options=options, + subject=subject, + body=body, + default=default, + params=params, + multiple=multiple, + ) + ) + + +def update_htil_response_content_detail( + ti_id: UUID, + response_content: str, + params_input: MutableMapping | None = None, +) -> HITLResponseContentDetail: + from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS + + response = SUPERVISOR_COMMS.send( + msg=UpdateHITLResponse( + ti_id=ti_id, + response_content=response_content, + params_input=params_input, + ), + ) + if TYPE_CHECKING: + assert isinstance(response, HITLResponseContentDetail) + return response + + +def get_hitl_response_content_detail(ti_id: UUID) -> HITLResponseContentDetail: + from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS + + response = SUPERVISOR_COMMS.send(msg=GetHITLResponseContentDetail(ti_id=ti_id)) + + if TYPE_CHECKING: + assert isinstance(response, HITLResponseContentDetail) + return response diff --git a/providers/standard/src/airflow/providers/standard/get_provider_info.py b/providers/standard/src/airflow/providers/standard/get_provider_info.py index bb40bfaa7b21c..7ff28246b299a 100644 --- 
a/providers/standard/src/airflow/providers/standard/get_provider_info.py +++ b/providers/standard/src/airflow/providers/standard/get_provider_info.py @@ -58,6 +58,7 @@ def get_provider_info(): "airflow.providers.standard.operators.latest_only", "airflow.providers.standard.operators.smooth", "airflow.providers.standard.operators.branch", + "airflow.providers.standard.operators.hitl", ], } ], @@ -93,9 +94,16 @@ def get_provider_info(): "airflow.providers.standard.triggers.external_task", "airflow.providers.standard.triggers.file", "airflow.providers.standard.triggers.temporal", + "airflow.providers.standard.triggers.hitl", ], } ], + "plugins": [ + { + "name": "standard_hitl", + "plugin-class": "airflow.providers.standard.plugins.hitl.HumanInTheLoopPlugin", + } + ], "extra-links": [ "airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink", "airflow.providers.standard.sensors.external_task.ExternalDagLink", diff --git a/providers/standard/src/airflow/providers/standard/migrations/README b/providers/standard/src/airflow/providers/standard/migrations/README new file mode 100644 index 0000000000000..2500aa1bcf726 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. diff --git a/providers/standard/src/airflow/providers/standard/migrations/__init__.py b/providers/standard/src/airflow/providers/standard/migrations/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/migrations/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/src/airflow/providers/standard/migrations/env.py b/providers/standard/src/airflow/providers/standard/migrations/env.py new file mode 100644 index 0000000000000..13143137e58be --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/migrations/env.py @@ -0,0 +1,126 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import contextlib +from logging import getLogger +from logging.config import fileConfig + +from alembic import context + +from airflow import settings +from airflow.providers.standard.models.db import HITLDBManager + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. 
+config = context.config + +version_table = HITLDBManager.version_table_name + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if not getLogger().handlers and config.config_file_name: + fileConfig(config.config_file_name, disable_existing_loggers=False) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = HITLDBManager.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def include_object(_, name, type_, *args): + if type_ == "table" and name not in target_metadata.tables: + return False + return True + + +def run_migrations_offline(): + """ + Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + context.configure( + url=settings.SQL_ALCHEMY_CONN, + target_metadata=target_metadata, + literal_binds=True, + compare_type=True, + compare_server_default=True, + render_as_batch=True, + version_table=version_table, + include_object=include_object, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """ + Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, "autogenerate", False): + script = directives[0] + if script.upgrade_ops and script.upgrade_ops.is_empty(): + directives[:] = [] + print("No change detected in ORM schema, skipping revision.") + + with contextlib.ExitStack() as stack: + connection = config.attributes.get("connection", None) + + if not connection: + connection = stack.push(settings.engine.connect()) + + context.configure( + connection=connection, + transaction_per_migration=True, + target_metadata=target_metadata, + compare_type=True, + compare_server_default=True, + include_object=include_object, + render_as_batch=True, + process_revision_directives=process_revision_directives, + version_table=version_table, + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/providers/standard/src/airflow/providers/standard/migrations/script.py.mako b/providers/standard/src/airflow/providers/standard/migrations/script.py.mako new file mode 100644 index 0000000000000..6f890ef2b20a5 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/migrations/script.py.mako @@ -0,0 +1,45 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} +standard_provider_verison = None + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py b/providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py new file mode 100644 index 0000000000000..dad65f4fc5da3 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py @@ -0,0 +1,45 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add hitl_response table. 
+ +Revision ID: 5e7113ca79cc +Revises: +Create Date: 2025-06-13 17:06:38.040510 + +Note: This is a placeholder migration used to stamp the migration +when we create the migration from the ORM. Otherwise, it will run +without stamping the migration, leading to subsequent changes to +the tables not being migrated. +""" + +from __future__ import annotations + +# revision identifiers, used by Alembic. +revision: str = "5e7113ca79cc" +down_revision = None +branch_labels = None +depends_on = None +standard_provider_verison = "1.3.0" + + +def upgrade() -> None: ... + + +def downgrade() -> None: ... diff --git a/providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py b/providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/src/airflow/providers/standard/models/__init__.py b/providers/standard/src/airflow/providers/standard/models/__init__.py new file mode 100644 index 0000000000000..cb558a7aee2bb --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/models/__init__.py @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +import sqlalchemy_jsonfield +from sqlalchemy import Boolean, Column, MetaData, String, Text +from sqlalchemy.dialects import postgresql +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.orm import registry + +from airflow.models.base import _get_schema, naming_convention +from airflow.settings import json +from airflow.utils.sqlalchemy import UtcDateTime + +metadata = MetaData(schema=_get_schema(), naming_convention=naming_convention) +mapper_registry = registry(metadata=metadata) + +if TYPE_CHECKING: + Base = Any # type: ignore[misc] +else: + Base = mapper_registry.generate_base() + + +class HITLResponseModel(Base): + """Human-in-the-loop received response.""" + + __tablename__ = "hitl_response" + ti_id = Column( + String(36).with_variant(postgresql.UUID(as_uuid=False), "postgresql"), + primary_key=True, + nullable=False, + ) + + # Input Request + options = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False) + subject = Column(Text, nullable=False) + body = Column(Text, nullable=True) + default = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + multiple = Column(Boolean, unique=False, default=False) + + params = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + + # Response Content Detail + response_at = Column(UtcDateTime, nullable=True) + user_id = Column(String(128), nullable=True) + response_content = Column( + sqlalchemy_jsonfield.JSONField(json=json), + nullable=True, + default=None, + ) + params_input = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + + @hybrid_property + def response_received(self) -> bool: + return self.response_at is not None diff --git a/providers/standard/src/airflow/providers/standard/models/db.py b/providers/standard/src/airflow/providers/standard/models/db.py new file mode 100644 index 0000000000000..f3a56c6b588c4 --- /dev/null +++ 
b/providers/standard/src/airflow/providers/standard/models/db.py @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import logging +from pathlib import Path + +from airflow.providers.standard.models import metadata +from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS +from airflow.utils.db_manager import BaseDBManager + +PACKAGE_DIR = Path(__file__).parents[1] + +_REVISION_HEADS_MAP: dict[str, str] = { + "1.3.0": "5e7113ca79cc", +} +log = logging.getLogger(__name__) + +if not AIRFLOW_V_3_1_PLUS: + log.warning("Human in the loop functionality needs Airflow 3.1+. 
Skip loading HITLDBManager.") +else: + + class HITLDBManager(BaseDBManager): + """Manages Human in the loop database.""" + + metadata = metadata + version_table_name = "alembic_version_hitl" + migration_dir = (PACKAGE_DIR / "migrations").as_posix() + alembic_file = (PACKAGE_DIR / "alembic.ini").as_posix() + supports_table_dropping = True + revision_heads_map = _REVISION_HEADS_MAP diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py new file mode 100644 index 0000000000000..22c09638f101f --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -0,0 +1,205 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+from __future__ import annotations + +import logging +from collections.abc import Sequence +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Any + +from airflow.models import SkipMixin +from airflow.models.baseoperator import BaseOperator +from airflow.providers.standard.exceptions import HITLTriggerEventError +from airflow.providers.standard.triggers.hitl import HITLTrigger +from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS +from airflow.sdk.definitions.param import ParamsDict +from airflow.sdk.execution_time.hitl import add_hitl_response + +if TYPE_CHECKING: + from airflow.sdk.definitions.context import Context + +log = logging.getLogger(__name__) +if not AIRFLOW_V_3_1_PLUS: + log.warning("Human in the loop functionality needs Airflow 3.1+..") + + +class HITLOperator(BaseOperator): + """ + Base class for all Human-in-the-loop Operators to inherit from. + + :param subject: Headline/subject presented to the user for the interaction task + :param options: List of options that the human can select from and click to complete the task. + Buttons on the UI will be presented in the order of the list + :param body: descriptive text that might give background, hints or can provide background or summary of + details that are needed to decide + :param default: The default result (highlighted button) and result that is taken if timeout is passed + :param params: dictionary of parameter definitions that are in the format of Dag params such that + a Form Field can be rendered. 
Entered data is validated (schema, required fields) like for a Dag run + and added to XCom of the task result + """ + + template_fields: Sequence[str] = ("subject", "body") + + allow_arbitrary_input: bool = False + + def __init__( + self, + *, + subject: str, + options: list[str], + body: str | None = None, + default: str | list[str] | None = None, + params: ParamsDict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.options = options + self.subject = subject + self.body = body + self.params = params or {} + self.multiple = False + self.default = [default] if isinstance(default, str) else default + + self.validate_default() + + def validate_default(self) -> None: + if self.default is None and self.execution_timeout: + raise ValueError('"default" is required when "execution_timeout" is provided.') + + def execute(self, context: Context): + ti_id = context["task_instance"].id + # Write Human-in-the-loop input request to DB + add_hitl_response( + ti_id=ti_id, + options=self.options, + subject=self.subject, + body=self.body, + default=self.default, + multiple=self.multiple, + params=self.serializable_params, + ) + self.log.info("Waiting for response") + if self.execution_timeout: + timeout_datetime = datetime.now(timezone.utc) + self.execution_timeout + else: + timeout_datetime = None + # Defer the Human-in-the-loop response checking process to HITLTrigger + self.defer( + trigger=HITLTrigger( + ti_id=ti_id, + options=self.options, + default=self.default, + params=self.serializable_params, + multiple=self.multiple, + timeout_datetime=timeout_datetime, + ), + method_name="execute_complete", + ) + + def execute_complete(self, context: Context, event: dict[str, Any]) -> Any: + if "error" in event: + raise HITLTriggerEventError(event["error"]) + + response_content = event["response_content"] + params_input = event["params_input"] + if self.allow_arbitrary_input: + self.validate_response_content(response_content) + 
+ self.validate_params_input(params_input) + return { + "response_content": response_content, + "params_input": params_input, + } + + def validate_response_content(self, response_content: str | list[str]) -> None: + if isinstance(response_content, list): + if self.multiple is False: + raise ValueError( + f"Multiple response {response_content} received while multiple is set to False" + ) + + if diff := set(response_content) - set(self.options): + raise ValueError(f"Responses {diff} not in {self.options}") + + if response_content not in self.options: + raise ValueError(f"Response {response_content} not in {self.options}") + + def validate_params_input(self, params_input: dict | None) -> None: + if ( + self.serializable_params is not None + and params_input is not None + and set(self.serializable_params.keys()) ^ set(params_input) + ): + raise ValueError(f"params_input {params_input} does not match params {self.params}") + + @property + def serializable_params(self) -> dict[str, Any] | None: + return self.params.dump() if isinstance(self.params, ParamsDict) else self.params + + +class ApprovalOperator(HITLOperator): + """Human-in-the-loop Operator that has only 'Approval' and 'Reject' options.""" + + def __init__(self, **kwargs) -> None: + if "options" in kwargs: + kwargs.pop("options") + self.log.warning("Passing options into ApprovalOperator will be ignored.") + super().__init__(options=["Approve", "Reject"], **kwargs) + + +class HITLTerminationOperator(HITLOperator, SkipMixin): + """ShortCircuitOperator to terminate the Dag run by human decision.""" + + def __init__(self, **kwargs) -> None: + if "options" in kwargs: + kwargs.pop("options") + self.log.warning("Passing options into HITLTerminationOperator will be ignored.") + super().__init__(options=["Stop", "Proceed"], **kwargs) + + def execute_complete(self, context: Context, event: dict[str, Any]) -> None: + raise NotImplementedError + + +class HITLBranchOperator(HITLOperator): + """BranchOperator based on
Human-in-the-loop Response.""" + + def __init__(self, *, multiple: bool = False, **kwargs) -> None: + super().__init__(**kwargs) + self.multiple = multiple + + def execute_complete(self, context: Context, event: dict[str, Any]) -> None: + raise NotImplementedError + + +class HITLEntryOperator(HITLOperator): + """ + User can add further information with all options that a TriggerForm allows (same like Dag params). + + Options and default default to ["OK"] but can be over-ridden. + """ + + def __init__( + self, + **kwargs, + ) -> None: + if "options" not in kwargs: + kwargs["options"] = ["OK"] + kwargs["default"] = ["OK"] + + super().__init__(**kwargs) + + def execute_complete(self, context: Context, event: dict[str, Any]) -> None: + raise NotImplementedError diff --git a/providers/standard/src/airflow/providers/standard/plugins/__init__.py b/providers/standard/src/airflow/providers/standard/plugins/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/plugins/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/src/airflow/providers/standard/plugins/hitl.py b/providers/standard/src/airflow/providers/standard/plugins/hitl.py new file mode 100644 index 0000000000000..b801eeec39300 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/plugins/hitl.py @@ -0,0 +1,75 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Any + +from fastapi import FastAPI + +from airflow.api_fastapi.app import create_auth_manager +from airflow.plugins_manager import AirflowPlugin +from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS +from airflow.utils.session import NEW_SESSION, provide_session + +log = logging.getLogger(__name__) + +if TYPE_CHECKING: + from sqlalchemy.orm import Session + +if AIRFLOW_V_3_1_PLUS: + + @provide_session + def _get_api_endpoint(session: Session = NEW_SESSION) -> dict[str, Any]: + from airflow.utils.db import DBLocks, create_global_lock + + with create_global_lock(session=session, lock=DBLocks.MIGRATIONS): + engine = session.get_bind().engine + from airflow.providers.standard.models import HITLResponseModel + + HITLResponseModel.metadata.create_all(engine) + + from airflow.providers.standard.api_fastapi.core_api.routes.hitl import hitl_router + + hitl_api_app = FastAPI( + title="Airflow Human-in-the-loop API", + description=( + "This is Airflow Human-in-the-loop API - which allow human interactions." + "You can find more information in AIP-90 " + "https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-90+Human+in+the+loop" + ), + ) + hitl_api_app.include_router(hitl_router) + am = create_auth_manager() + am.init() + hitl_api_app.state.auth_manager = am + + return { + "app": hitl_api_app, + "url_prefix": "/hitl-responses", + "name": "Airflow Human in the loop API", + } + + +class HumanInTheLoopPlugin(AirflowPlugin): + """Human in the loop plugin for Airflow.""" + + name = "standard_hitl" + if AIRFLOW_V_3_1_PLUS: + fastapi_apps = [_get_api_endpoint()] + else: + log.warning("Human in the loop functionality needs Airflow 3.1+. 
Skip loading HITLDBManager.") diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py new file mode 100644 index 0000000000000..59638a34d0c39 --- /dev/null +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -0,0 +1,113 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+from __future__ import annotations + +import asyncio +import logging +from collections.abc import AsyncIterator +from datetime import datetime, timezone +from typing import Any +from uuid import UUID + +from asgiref.sync import sync_to_async + +from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS +from airflow.sdk.execution_time.hitl import ( + get_hitl_response_content_detail, + update_htil_response_content_detail, +) +from airflow.triggers.base import BaseTrigger, TriggerEvent + +log = logging.getLogger(__name__) +if not AIRFLOW_V_3_1_PLUS: + log.warning("Human in the loop functionality needs Airflow 3.1+.") + + +class HITLTrigger(BaseTrigger): + """A trigger that checks whether Human-in-the-loop responses are received.""" + + def __init__( + self, + *, + ti_id: UUID, + options: list[str], + default: list[str] | None = None, + params: dict[str, Any], + multiple: bool = False, + timeout_datetime: datetime | None, + poke_interval: float = 5.0, + **kwargs, + ): + super().__init__(**kwargs) + self.ti_id = ti_id + self.options = options + self.timeout_datetime = timeout_datetime + self.default = default + self.params = params + self.multiple = multiple + self.poke_interval = poke_interval + + def serialize(self) -> tuple[str, dict[str, Any]]: + """Serialize HITLTrigger arguments and classpath.""" + return ( + "airflow.providers.standard.triggers.hitl.HITLTrigger", + { + "ti_id": self.ti_id, + "options": self.options, + "default": self.default, + "params": self.params, + "multiple": self.multiple, + "timeout_datetime": self.timeout_datetime, + "poke_interval": self.poke_interval, + }, + ) + + async def run(self) -> AsyncIterator[TriggerEvent]: + """Loop until the Human-in-the-loop response received or timeout reached.""" + while True: + if self.timeout_datetime and self.timeout_datetime < datetime.now(timezone.utc): + if self.default is None: + yield TriggerEvent( + { + "error": '"default" is required when "execution_timeout" is provided.', + } + )
+ return + + default_content: str = self.default[0] if isinstance(self.default, list) else self.default + resp = await sync_to_async(update_htil_response_content_detail)( + ti_id=self.ti_id, response_content=default_content + ) + yield TriggerEvent( + { + "response_content": default_content, + "params_input": self.params, + } + ) + return + + resp = await sync_to_async(get_hitl_response_content_detail)(ti_id=self.ti_id) + if resp.response_received: + self.log.info("Responded by %s at %s", resp.user_id, resp.response_at) + yield TriggerEvent( + { + "response_content": resp.response_content, + "params_input": resp.params_input, + } + ) + return + await asyncio.sleep(self.poke_interval) diff --git a/providers/standard/tests/unit/standard/api/__init__.py b/providers/standard/tests/unit/standard/api/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/tests/unit/standard/api/test_client.py b/providers/standard/tests/unit/standard/api/test_client.py new file mode 100644 index 0000000000000..527b6d5c2cf3a --- /dev/null +++ b/providers/standard/tests/unit/standard/api/test_client.py @@ -0,0 +1,136 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING + +import httpx +from uuid6 import uuid7 + +from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail +from airflow.providers.standard.execution_time.comms import HITLInputRequestResponseResult +from airflow.sdk.api.client import Client +from airflow.utils import timezone + +if TYPE_CHECKING: + from time_machine import TimeMachineFixture + + +def make_client(transport: httpx.MockTransport) -> Client: + """Get a client with a custom transport.""" + return Client(base_url="test://server", token="", transport=transport) + + +class TestHITLOperations: + def test_add_response(self) -> None: + ti_id = uuid7() + + def handle_request(request: httpx.Request) -> httpx.Response: + if request.url.path in (f"/hitl-responses/{ti_id}"): + return httpx.Response( + status_code=201, + json={ + "ti_id": str(ti_id), + "options": ["Approval", "Reject"], + "subject": "This is subject", + "body": "This is body", + "default": ["Approval"], + "params": None, + "multiple": False, + }, + ) + return httpx.Response(status_code=400, json={"detail": "Bad Request"}) + + client = make_client(transport=httpx.MockTransport(handle_request)) + result = client.hitl.add_response( + ti_id=ti_id, + options=["Approval", "Reject"], + subject="This is subject", + body="This is body", + default=["Approval"], + params=None, + multiple=False, + ) + assert isinstance(result, HITLInputRequestResponseResult) + assert result.ti_id == ti_id + assert result.options == ["Approval", "Reject"] + assert result.subject == "This is subject" + assert result.body == "This is body" + assert result.default == ["Approval"] + assert result.params is None + assert result.multiple is False + + def test_update_response(self, time_machine: TimeMachineFixture) -> None: + time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) + ti_id = uuid7() + + def handle_request(request: 
httpx.Request) -> httpx.Response: + if request.url.path in (f"/hitl-responses/{ti_id}"): + return httpx.Response( + status_code=200, + json={ + "response_content": "Approval", + "params_input": None, + "user_id": "admin", + "response_received": True, + "response_at": "2025-07-03T00:00:00Z", + }, + ) + return httpx.Response(status_code=400, json={"detail": "Bad Request"}) + + client = make_client(transport=httpx.MockTransport(handle_request)) + result = client.hitl.update_response( + ti_id=ti_id, + response_content="Approve", + params_input=None, + ) + assert isinstance(result, HITLResponseContentDetail) + assert result.response_received is True + assert result.response_content == "Approval" + assert result.params_input is None + assert result.user_id == "admin" + assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) + + def test_get_response_content_detail(self, time_machine: TimeMachineFixture) -> None: + time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) + ti_id = uuid7() + + def handle_request(request: httpx.Request) -> httpx.Response: + if request.url.path in (f"/hitl-responses/{ti_id}"): + return httpx.Response( + status_code=200, + json={ + "response_content": "Approval", + "params_input": None, + "user_id": "admin", + "response_received": True, + "response_at": "2025-07-03T00:00:00Z", + }, + ) + return httpx.Response(status_code=400, json={"detail": "Bad Request"}) + + client = make_client(transport=httpx.MockTransport(handle_request)) + result = client.hitl.get_response_content_detail( + ti_id=ti_id, + ) + assert isinstance(result, HITLResponseContentDetail) + assert result.response_received is True + assert result.response_content == "Approval" + assert result.params_input is None + assert result.user_id == "admin" + assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) diff --git a/providers/standard/tests/unit/standard/api_fastapi/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/__init__.py new file mode 
100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/execution_time/__init__.py b/providers/standard/tests/unit/standard/execution_time/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/execution_time/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/execution_time/test_comms.py b/providers/standard/tests/unit/standard/execution_time/test_comms.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/execution_time/test_comms.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/tests/unit/standard/execution_time/test_hitl.py b/providers/standard/tests/unit/standard/execution_time/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/execution_time/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/migrations/__init__.py b/providers/standard/tests/unit/standard/migrations/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/migrations/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/migrations/test_env.py b/providers/standard/tests/unit/standard/migrations/test_env.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/migrations/test_env.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/models/__init__.py b/providers/standard/tests/unit/standard/models/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/models/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/models/test_db.py b/providers/standard/tests/unit/standard/models/test_db.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/models/test_db.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/plugins/__init__.py b/providers/standard/tests/unit/standard/plugins/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/plugins/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/plugins/test_hitl.py b/providers/standard/tests/unit/standard/plugins/test_hitl.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/standard/tests/unit/standard/plugins/test_hitl.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/standard/tests/unit/standard/triggers/test_hitl.py b/providers/standard/tests/unit/standard/triggers/test_hitl.py new file mode 100644 index 0000000000000..6a1edb46cd20f --- /dev/null +++ b/providers/standard/tests/unit/standard/triggers/test_hitl.py @@ -0,0 +1,126 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import pytest + +from tests_common.test_utils.db import AIRFLOW_V_3_1_PLUS + +if not AIRFLOW_V_3_1_PLUS: + pytest.skip("Human in the loop public API compatible with Airflow >= 3.0.1", allow_module_level=True) + +import asyncio +from datetime import timedelta +from unittest import mock + +from uuid6 import uuid7 + +from airflow.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail +from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload +from airflow.triggers.base import TriggerEvent +from airflow.utils.timezone import utcnow + +TI_ID = uuid7() + + +class TestHITLTrigger: + def test_serialization(self): + trigger = HITLTrigger( + ti_id=TI_ID, + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + default=["1"], + multiple=False, + timeout_datetime=None, + poke_interval=50.0, + ) + classpath, kwargs = trigger.serialize() + assert classpath == "airflow.providers.standard.triggers.hitl.HITLTrigger" + assert kwargs == { + "ti_id": TI_ID, + "options": ["1", "2", "3", "4", "5"], + "params": {"input": 1}, + "default": ["1"], + "multiple": False, + "timeout_datetime": None, + "poke_interval": 50.0, + } + + @pytest.mark.db_test + @pytest.mark.asyncio + 
@mock.patch("airflow.sdk.execution_time.hitl.update_htil_response_content_detail") + async def test_run_fallback_to_default_due_to_timeout(self, mock_update, mock_supervisor_comms): + trigger = HITLTrigger( + ti_id=TI_ID, + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + default=["1"], + multiple=False, + timeout_datetime=utcnow() + timedelta(seconds=0.1), + poke_interval=5, + ) + mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + response_received=False, + user_id=None, + response_at=None, + response_content=None, + params_input={}, + ) + + gen = trigger.run() + trigger_task = asyncio.create_task(gen.__anext__()) + await asyncio.sleep(0.3) + event = await trigger_task + assert event == TriggerEvent( + HITLTriggerEventSuccessPayload( + response_content=["1"], + params_input={"input": 1}, + ) + ) + + @pytest.mark.db_test + @pytest.mark.asyncio + @mock.patch("airflow.sdk.execution_time.hitl.update_htil_response_content_detail") + async def test_run(self, mock_update, mock_supervisor_comms): + trigger = HITLTrigger( + ti_id=TI_ID, + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + default=["1"], + multiple=False, + timeout_datetime=None, + poke_interval=5, + ) + mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + response_received=True, + user_id="test", + response_at=utcnow(), + response_content=["3"], + params_input={"input": 50}, + ) + + gen = trigger.run() + trigger_task = asyncio.create_task(gen.__anext__()) + await asyncio.sleep(0.3) + event = await trigger_task + assert event == TriggerEvent( + HITLTriggerEventSuccessPayload( + response_content=["3"], + params_input={"input": 50}, + ) + ) diff --git a/pyproject.toml b/pyproject.toml index a0b4af0e9a9bc..3d6cbf2ee5bed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -726,6 +726,7 @@ testing = ["dev", "providers.tests", "tests_common", "tests", "system", "unit", # Annotated is central for FastAPI dependency injection, skipping rules for FastAPI 
folders. "airflow-core/src/airflow/api_fastapi/*" = ["TC001", "TC002"] "airflow-core/tests/unit/api_fastapi/*" = ["T001", "TC002"] +"providers/standard/src/airflow/providers/standard/api_fastapi/*" = ["TC001", "TC002"] # Ignore pydoc style from these "*.pyi" = ["D"] diff --git a/task-sdk/src/airflow/sdk/api/client.py b/task-sdk/src/airflow/sdk/api/client.py index 179bb9a443f5a..47b09a8d2a89c 100644 --- a/task-sdk/src/airflow/sdk/api/client.py +++ b/task-sdk/src/airflow/sdk/api/client.py @@ -17,6 +17,7 @@ from __future__ import annotations +import contextlib import logging import sys import uuid @@ -751,6 +752,17 @@ def asset_events(self) -> AssetEventOperations: """Operations related to Asset Events.""" return AssetEventOperations(self) + # TODO: Remove this block once we can make the execution API pluggable. + with contextlib.suppress(ModuleNotFoundError): + + @lru_cache() # type: ignore[misc] + @property + def hitl(self): + from airflow.providers.standard.api.client import HITLOperations + + """Operations related to HITL Responses.""" + return HITLOperations(self) + # This is only used for parsing. ServerResponseError is raised instead class _ErrorBody(BaseModel): diff --git a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py index f0d3d45d1684a..a515fe65cd8fa 100644 --- a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py +++ b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py @@ -102,6 +102,23 @@ class ConnectionResponse(BaseModel): extra: Annotated[str | None, Field(title="Extra")] = None +class CreateHITLResponsePayload(BaseModel): + """ + Add the input request part of a Human-in-the-loop response. 
+ """ + + ti_id: Annotated[UUID, Field(title="Ti Id")] + options: Annotated[list[str], Field(title="Options")] + subject: Annotated[str, Field(title="Subject")] + body: Annotated[str | None, Field(title="Body")] = None + default: Annotated[list[str] | None, Field(title="Default")] = None + multiple: Annotated[bool | None, Field(title="Multiple")] = False + params: Annotated[dict[str, Any] | None, Field(title="Params")] = None + type: Annotated[Literal["CreateHITLResponsePayload"] | None, Field(title="Type")] = ( + "CreateHITLResponsePayload" + ) + + class DagRunAssetReference(BaseModel): """ DagRun serializer for asset responses. @@ -154,6 +171,32 @@ class DagRunType(str, Enum): ASSET_TRIGGERED = "asset_triggered" +class HITLInputRequestResponse(BaseModel): + """ + Schema for the input request part of a Human-in-the-loop Response for a specific task instance. + """ + + ti_id: Annotated[UUID, Field(title="Ti Id")] + options: Annotated[list[str], Field(title="Options")] + subject: Annotated[str, Field(title="Subject")] + body: Annotated[str | None, Field(title="Body")] = None + default: Annotated[list[str] | None, Field(title="Default")] = None + multiple: Annotated[bool | None, Field(title="Multiple")] = False + params: Annotated[dict[str, Any] | None, Field(title="Params")] = None + + +class HITLResponseContentDetail(BaseModel): + """ + Schema for Human-in-the-loop response content detail for a specific task instance. + """ + + response_received: Annotated[bool, Field(title="Response Received")] + response_at: Annotated[AwareDatetime | None, Field(title="Response At")] = None + user_id: Annotated[str | None, Field(title="User Id")] = None + response_content: Annotated[str | None, Field(title="Response Content")] = None + params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None + + class InactiveAssetsResponse(BaseModel): """ Response for inactive assets. 
@@ -325,6 +368,17 @@ class TriggerDAGRunPayload(BaseModel): reset_dag_run: Annotated[bool | None, Field(title="Reset Dag Run")] = False +class UpdateHITLResponse(BaseModel): + """ + Update the response content part of an existing Human-in-the-loop response. + """ + + ti_id: Annotated[UUID, Field(title="Ti Id")] + response_content: Annotated[str, Field(title="Response Content")] + params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None + type: Annotated[Literal["UpdateHITLResponse"] | None, Field(title="Type")] = "UpdateHITLResponse" + + class ValidationError(BaseModel): loc: Annotated[list[str | int], Field(title="Location")] msg: Annotated[str, Field(title="Message")] diff --git a/task-sdk/src/airflow/sdk/execution_time/comms.py b/task-sdk/src/airflow/sdk/execution_time/comms.py index d52e9bb3cab5f..00c6d278381f6 100644 --- a/task-sdk/src/airflow/sdk/execution_time/comms.py +++ b/task-sdk/src/airflow/sdk/execution_time/comms.py @@ -96,6 +96,21 @@ # Available on Unix and Windows (so "everywhere") but lets be safe recv_fds = None # type: ignore[assignment] +# TODO: Remove this block once we can make the execution API pluggable. 
+try: + from airflow.providers.standard.execution_time.comms import ( + CreateHITLResponsePayload, + GetHITLResponseContentDetail, + HITLInputRequestResponseResult, + UpdateHITLResponse, + ) +except ModuleNotFoundError: + GetHITLResponseContentDetail = object # type: ignore[misc, assignment] + CreateHITLResponsePayload = object # type: ignore[misc, assignment] + HITLInputRequestResponseResult = object # type: ignore[misc, assignment] + UpdateHITLResponse = object # type: ignore[misc, assignment] + + if TYPE_CHECKING: from structlog.typing import FilteringBoundLogger as Logger @@ -576,6 +591,8 @@ class SentFDs(BaseModel): | XComSequenceIndexResult | XComSequenceSliceResult | InactiveAssetsResult + | CreateHITLResponsePayload + | HITLInputRequestResponseResult | OKResponse, Field(discriminator="type"), ] @@ -868,6 +885,10 @@ class GetDRCount(BaseModel): | TaskState | TriggerDagRun | DeleteVariable - | ResendLoggingFD, + | ResendLoggingFD + # HITL response from standard provider + | CreateHITLResponsePayload + | UpdateHITLResponse + | GetHITLResponseContentDetail, Field(discriminator="type"), ] diff --git a/task-sdk/src/airflow/sdk/execution_time/supervisor.py b/task-sdk/src/airflow/sdk/execution_time/supervisor.py index 690411979865e..1451cbfe40505 100644 --- a/task-sdk/src/airflow/sdk/execution_time/supervisor.py +++ b/task-sdk/src/airflow/sdk/execution_time/supervisor.py @@ -20,6 +20,7 @@ from __future__ import annotations import atexit +import contextlib import io import logging import os @@ -1231,6 +1232,23 @@ def _handle_request(self, msg: ToSupervisor, log: FilteringBoundLogger, req_id: # Since we've sent the message, return. Nothing else in this ifelse/switch should return directly return else: + # TODO: Remove this block once we can make the execution API pluggable. 
+ with contextlib.suppress(ModuleNotFoundError): + from airflow.providers.standard.execution_time.comms import CreateHITLResponsePayload + + if isinstance(msg, CreateHITLResponsePayload): + resp = self.client.hitl.add_response( + ti_id=msg.ti_id, + options=msg.options, + subject=msg.subject, + body=msg.body, + default=msg.default, + params=msg.params, + multiple=msg.multiple, + ) + self.send_msg(resp, request_id=req_id, error=None, **dump_opts) + return + log.error("Unhandled request", msg=msg) self.send_msg( None, From 79752b93215bf4aeb1a5ce5c9bd46bbea2805ec0 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Fri, 4 Jul 2025 19:05:20 +0800 Subject: [PATCH 02/30] feat(hitl): move hitl execution api to core --- .../core_api/routes/public/__init__.py | 2 + .../execution_api/datamodels/hitl.py | 0 .../execution_api/routes/__init__.py | 1 - .../api_fastapi/execution_api/routes/hitl.py | 9 +- .../src/airflow/jobs/triggerer_job_runner.py | 46 +++--- airflow-core/src/airflow/utils/db.py | 25 +--- .../core_api/routes/public/test_plugins.py | 7 +- .../unit/plugins/test_plugins_manager.py | 2 +- airflow-core/tests/unit/utils/test_db.py | 6 - providers/standard/provider.yaml | 4 - providers/standard/pyproject.toml | 3 - .../airflow/providers/standard/alembic.ini | 133 ----------------- .../providers/standard/api/__init__.py | 39 ----- .../airflow/providers/standard/api/client.py | 97 ------------- .../standard/execution_time/__init__.py | 16 --- .../standard/execution_time/comms.py | 67 --------- .../providers/standard/get_provider_info.py | 6 - .../providers/standard/migrations/README | 1 - .../providers/standard/migrations/__init__.py | 16 --- .../providers/standard/migrations/env.py | 126 ---------------- .../standard/migrations/script.py.mako | 45 ------ .../0001_3_0_placeholder_migration.py | 45 ------ .../standard/migrations/versions/__init__.py | 16 --- .../airflow/providers/standard/models/db.py | 45 ------ .../providers/standard/plugins/__init__.py | 16 --- 
.../providers/standard/plugins/hitl.py | 75 ---------- .../tests/unit/standard/api/__init__.py | 16 --- .../tests/unit/standard/api/test_client.py | 136 ------------------ .../standard/api_fastapi/core_api/__init__.py | 16 --- .../core_api/datamodels/__init__.py | 16 --- .../core_api/datamodels/test_hitl.py | 16 --- .../api_fastapi/core_api/routes/__init__.py | 16 --- .../api_fastapi/core_api/routes/test_hitl.py | 16 --- .../api_fastapi/execution_api/__init__.py | 16 --- .../execution_api/datamodels/__init__.py | 16 --- .../execution_api/datamodels/test_hitl.py | 16 --- .../execution_api/routes/__init__.py | 16 --- .../execution_api/routes/test_hitl.py | 16 --- .../unit/standard/execution_time/__init__.py | 16 --- .../standard/execution_time/test_comms.py | 16 --- .../unit/standard/execution_time/test_hitl.py | 16 --- .../unit/standard/migrations/__init__.py | 16 --- .../unit/standard/migrations/test_env.py | 16 --- .../tests/unit/standard/models/__init__.py | 16 --- .../tests/unit/standard/models/test_db.py | 16 --- .../tests/unit/standard/plugins/__init__.py | 16 --- .../tests/unit/standard/plugins/test_hitl.py | 16 --- pyproject.toml | 1 - task-sdk/src/airflow/sdk/api/client.py | 84 +++++++++-- .../src/airflow/sdk/execution_time/comms.py | 44 ++++-- .../src/airflow/sdk}/execution_time/hitl.py | 11 +- .../airflow/sdk/execution_time/supervisor.py | 30 ++-- 52 files changed, 158 insertions(+), 1316 deletions(-) rename {providers/standard/src/airflow/providers/standard => airflow-core/src/airflow}/api_fastapi/execution_api/datamodels/hitl.py (100%) delete mode 100644 providers/standard/src/airflow/providers/standard/alembic.ini delete mode 100644 providers/standard/src/airflow/providers/standard/api/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/api/client.py delete mode 100644 providers/standard/src/airflow/providers/standard/execution_time/__init__.py delete mode 100644 
providers/standard/src/airflow/providers/standard/execution_time/comms.py delete mode 100644 providers/standard/src/airflow/providers/standard/migrations/README delete mode 100644 providers/standard/src/airflow/providers/standard/migrations/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/migrations/env.py delete mode 100644 providers/standard/src/airflow/providers/standard/migrations/script.py.mako delete mode 100644 providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py delete mode 100644 providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/models/db.py delete mode 100644 providers/standard/src/airflow/providers/standard/plugins/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/plugins/hitl.py delete mode 100644 providers/standard/tests/unit/standard/api/__init__.py delete mode 100644 providers/standard/tests/unit/standard/api/test_client.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py delete mode 100644 
providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py delete mode 100644 providers/standard/tests/unit/standard/execution_time/__init__.py delete mode 100644 providers/standard/tests/unit/standard/execution_time/test_comms.py delete mode 100644 providers/standard/tests/unit/standard/execution_time/test_hitl.py delete mode 100644 providers/standard/tests/unit/standard/migrations/__init__.py delete mode 100644 providers/standard/tests/unit/standard/migrations/test_env.py delete mode 100644 providers/standard/tests/unit/standard/models/__init__.py delete mode 100644 providers/standard/tests/unit/standard/models/test_db.py delete mode 100644 providers/standard/tests/unit/standard/plugins/__init__.py delete mode 100644 providers/standard/tests/unit/standard/plugins/test_hitl.py rename {providers/standard/src/airflow/providers/standard => task-sdk/src/airflow/sdk}/execution_time/hitl.py (89%) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py index fbbfb46dfa8d0..6db86ce2327a6 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -37,6 +37,7 @@ from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router from airflow.api_fastapi.core_api.routes.public.extra_links import extra_links_router +from airflow.api_fastapi.core_api.routes.public.hitl import hitl_router from airflow.api_fastapi.core_api.routes.public.import_error import import_error_router from airflow.api_fastapi.core_api.routes.public.job import job_router from airflow.api_fastapi.core_api.routes.public.log import task_instances_log_router @@ -83,6 +84,7 @@ authenticated_router.include_router(dag_parsing_router) authenticated_router.include_router(dag_tags_router) 
authenticated_router.include_router(dag_versions_router) +authenticated_router.include_router(hitl_router) # Include authenticated router in public router diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py similarity index 100% rename from providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/hitl.py rename to airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py index 383fc48ba8ee9..5d2dd78a1a1a7 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py @@ -51,5 +51,4 @@ authenticated_router.include_router(xcoms.router, prefix="/xcoms", tags=["XComs"]) authenticated_router.include_router(hitl.router, prefix="/hitl-responses", tags=["Human in the Loop"]) - execution_api_router.include_router(authenticated_router) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py index 3bd7e67558324..74fa3a4fa2696 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -24,15 +24,12 @@ from sqlalchemy import select from airflow.api_fastapi.common.db.common import SessionDep -from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import ( +from airflow.api_fastapi.execution_api.datamodels.hitl import ( HITLInputRequestResponse, -) -from airflow.providers.standard.execution_time.comms import ( - CreateHITLResponsePayload, HITLResponseContentDetail, - UpdateHITLResponse, ) from airflow.providers.standard.models import HITLResponseModel +from 
airflow.sdk.execution_time.comms import CreateHITLResponsePayload, UpdateHITLResponse router = APIRouter() @@ -43,7 +40,7 @@ "/{task_instance_id}", status_code=status.HTTP_201_CREATED, ) -def add_hitl_input_request( +def add_hitl_response( task_instance_id: UUID, payload: CreateHITLResponsePayload, session: SessionDep, diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index f91f326b1dd80..139cd3f85af9a 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -43,7 +43,7 @@ from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import perform_heartbeat from airflow.models.trigger import Trigger -from airflow.providers.standard.api.client import UpdateHITLResponse +from airflow.sdk.api.datamodels._generated import HITLResponseContentDetail from airflow.sdk.execution_time.comms import ( CommsDecoder, ConnectionResult, @@ -53,12 +53,14 @@ GetConnection, GetDagRunState, GetDRCount, + GetHITLResponseContentDetail, GetTaskStates, GetTICount, GetVariable, GetXCom, TaskStatesResult, TICount, + UpdateHITLResponse, VariableResult, XComResult, _RequestFrame, @@ -73,19 +75,6 @@ from airflow.utils.module_loading import import_string from airflow.utils.session import provide_session -# TODO: Remove this block once we can make the execution API pluggable. 
-try: - from airflow.providers.standard.execution_time.comms import ( - GetHITLResponseContentDetail, - HITLResponseContentDetailResult, - UpdateHITLResponse, - ) -except ModuleNotFoundError: - GetHITLResponseContentDetail = object # type: ignore[misc, assignment] - UpdateHITLResponse = object # type: ignore[misc, assignment] - HITLResponseContentDetailResult = object # type: ignore[misc, assignment] - - if TYPE_CHECKING: from sqlalchemy.orm import Session from structlog.typing import FilteringBoundLogger, WrappedLogger @@ -223,6 +212,23 @@ class TriggerStateSync(BaseModel): to_cancel: set[int] +class HITLResponseContentDetailResult(HITLResponseContentDetail): + """Response to GetHITLResponseContentDetail request.""" + + type: Literal["HITLResponseContentDetailResult"] = "HITLResponseContentDetailResult" + + @classmethod + def from_api_response(cls, response: HITLResponseContentDetail) -> HITLResponseContentDetailResult: + """ + Create result class from API Response. + + API Response is autogenerated from the API schema, so we need to convert it to Result + for communication between the Supervisor and the task process since it needs a + discriminator field. + """ + return cls(**response.model_dump(exclude_defaults=True), type="HITLResponseContentDetailResult") + + ToTriggerRunner = Annotated[ messages.StartTriggerer | messages.TriggerStateSync @@ -465,22 +471,14 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, r resp = TaskStatesResult.from_api_response(run_id_task_state_map) else: resp = run_id_task_state_map - # TODO: Remove this block once we can make the execution API pluggable. 
- elif issubclass(UpdateHITLResponse, BaseModel) and isinstance(msg, UpdateHITLResponse): - if TYPE_CHECKING: - assert HITLResponseContentDetailResult is not None + elif isinstance(msg, UpdateHITLResponse): api_resp = self.client.hitl.update_response( ti_id=msg.ti_id, response_content=msg.response_content, params_input=msg.params_input, ) resp = HITLResponseContentDetailResult.from_api_response(response=api_resp) - # TODO: Remove this block once we can make the execution API pluggable. - elif issubclass(GetHITLResponseContentDetail, BaseModel) and isinstance( - msg, GetHITLResponseContentDetail - ): - if TYPE_CHECKING: - assert HITLResponseContentDetailResult is not None + elif isinstance(msg, GetHITLResponseContentDetail): api_resp = self.client.hitl.get_response_content_detail(ti_id=msg.ti_id) resp = HITLResponseContentDetailResult.from_api_response(response=api_resp) else: diff --git a/airflow-core/src/airflow/utils/db.py b/airflow-core/src/airflow/utils/db.py index da281fd201810..8666deac458d4 100644 --- a/airflow-core/src/airflow/utils/db.py +++ b/airflow-core/src/airflow/utils/db.py @@ -20,7 +20,6 @@ import collections.abc import contextlib import enum -import importlib import itertools import json import logging @@ -1059,24 +1058,14 @@ def downgrade(*, to_revision, from_revision=None, show_sql_only=False, session: if _revision_greater(config, _REVISION_HEADS_MAP["2.10.3"], to_revision): unitest_mode = conf.getboolean("core", "unit_test_mode") if unitest_mode: - from packaging.version import Version - - from airflow import __version__ - - external_db_mangers = [("airflow.providers.fab.auth_manager.models.db", "FABDBManager")] - if Version(__version__) >= Version("3.1.0"): - external_db_mangers.append(("airflow.providers.standard.models.db", "HITLDBManager")) - - for module_path, cls_name in external_db_mangers: - try: - mangaer_module = importlib.import_module(module_path) - manager_obj = getattr(mangaer_module, cls_name) - dbm = manager_obj(session) - 
dbm.initdb() - except ImportError: - log.warning("Import error occurred while importing %s. Skipping the check.", cls_name) - return + try: + from airflow.providers.fab.auth_manager.models.db import FABDBManager + dbm = FABDBManager(session) + dbm.initdb() + except ImportError: + log.warning("Import error occurred while importing FABDBManager. Skipping the check.") + return if not inspect(settings.engine).has_table("ab_user") and not unitest_mode: raise AirflowException( "Downgrade to revision less than 3.0.0 requires that `ab_user` table is present. " diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py index 788ac22d85a2b..2b84d441b3f96 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py @@ -33,7 +33,7 @@ class TestGetPlugins: # Filters ( {}, - 14, + 13, [ "MetadataCollectionPlugin", "OpenLineageProviderPlugin", @@ -46,17 +46,16 @@ class TestGetPlugins: "plugin-c", "postload", "priority_weight_strategy_plugin", - "standard_hitl", "test_plugin", "workday_timetable_plugin", ], ), ( {"limit": 3, "offset": 2}, - 14, + 13, ["databricks_workflow", "decreasing_priority_weight_strategy_plugin", "edge_executor"], ), - ({"limit": 1}, 14, ["MetadataCollectionPlugin"]), + ({"limit": 1}, 13, ["MetadataCollectionPlugin"]), ], ) def test_should_respond_200( diff --git a/airflow-core/tests/unit/plugins/test_plugins_manager.py b/airflow-core/tests/unit/plugins/test_plugins_manager.py index b145c716e2f19..ae65bb2e79012 100644 --- a/airflow-core/tests/unit/plugins/test_plugins_manager.py +++ b/airflow-core/tests/unit/plugins/test_plugins_manager.py @@ -351,7 +351,7 @@ def test_does_not_double_import_entrypoint_provider_plugins(self): assert len(plugins_manager.plugins) == 0 plugins_manager.load_entrypoint_plugins() plugins_manager.load_providers_plugins() - 
assert len(plugins_manager.plugins) == 5 + assert len(plugins_manager.plugins) == 4 class TestPluginsDirectorySource: diff --git a/airflow-core/tests/unit/utils/test_db.py b/airflow-core/tests/unit/utils/test_db.py index 51d722b4c2e81..cf69275025330 100644 --- a/airflow-core/tests/unit/utils/test_db.py +++ b/airflow-core/tests/unit/utils/test_db.py @@ -37,7 +37,6 @@ from airflow.exceptions import AirflowException from airflow.models import Base as airflow_base from airflow.providers.fab.auth_manager.models.db import FABDBManager -from airflow.providers.standard.models.db import HITLDBManager from airflow.settings import engine from airflow.utils.db import ( _REVISION_HEADS_MAP, @@ -75,9 +74,6 @@ def test_database_schema_and_sqlalchemy_model_are_in_sync(self): # test FAB models for table_name, table in FABDBManager.metadata.tables.items(): all_meta_data._add_table(table_name, table.schema, table) - # test Human-in-the-loop models - for table_name, table in HITLDBManager.metadata.tables.items(): - all_meta_data._add_table(table_name, table.schema, table) # create diff between database schema and SQLAlchemy model mctx = MigrationContext.configure( engine.connect(), @@ -103,8 +99,6 @@ def test_database_schema_and_sqlalchemy_model_are_in_sync(self): lambda t: (t[0] == "remove_table" and t[1].name == "sqlite_sequence"), # fab version table lambda t: (t[0] == "remove_table" and t[1].name == "alembic_version_fab"), - # hitl version table - lambda t: (t[0] == "remove_table" and t[1].name == "alembic_version_hitl"), # Ignore _xcom_archive table lambda t: (t[0] == "remove_table" and t[1].name == "_xcom_archive"), ] diff --git a/providers/standard/provider.yaml b/providers/standard/provider.yaml index b0f083674f16b..6ccab9577bc7a 100644 --- a/providers/standard/provider.yaml +++ b/providers/standard/provider.yaml @@ -96,10 +96,6 @@ triggers: - airflow.providers.standard.triggers.temporal - airflow.providers.standard.triggers.hitl -plugins: - - name: standard_hitl - 
plugin-class: airflow.providers.standard.plugins.hitl.HumanInTheLoopPlugin - extra-links: - airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink - airflow.providers.standard.sensors.external_task.ExternalDagLink diff --git a/providers/standard/pyproject.toml b/providers/standard/pyproject.toml index 391a7dffdc6a9..6d73aaed13329 100644 --- a/providers/standard/pyproject.toml +++ b/providers/standard/pyproject.toml @@ -105,8 +105,5 @@ apache-airflow-providers-standard = {workspace = true} [project.entry-points."apache_airflow_provider"] provider_info = "airflow.providers.standard.get_provider_info:get_provider_info" -[project.entry-points."airflow.plugins"] -standard_hitl = "airflow.providers.standard.plugins.hitl:HumanInTheLoopPlugin" - [tool.flit.module] name = "airflow.providers.standard" diff --git a/providers/standard/src/airflow/providers/standard/alembic.ini b/providers/standard/src/airflow/providers/standard/alembic.ini deleted file mode 100644 index 75d42ee16d3b9..0000000000000 --- a/providers/standard/src/airflow/providers/standard/alembic.ini +++ /dev/null @@ -1,133 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# A generic, single database configuration. 
- -[alembic] -# path to migration scripts -# Use forward slashes (/) also on windows to provide an os agnostic path -script_location = %(here)s/migrations - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements -# string value is passed to ZoneInfo() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to alembic/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = scheme://localhost/airflow - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/providers/standard/src/airflow/providers/standard/api/__init__.py b/providers/standard/src/airflow/providers/standard/api/__init__.py deleted file mode 
100644 index 920412ab9cba2..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# -# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE -# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES. 
-# -# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE -# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -# -from __future__ import annotations - -import packaging.version - -from airflow import __version__ as airflow_version - -__all__ = ["__version__"] - -__version__ = "1.3.0" - -if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( - "2.10.0" -): - raise RuntimeError( - f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.10.0+" - ) diff --git a/providers/standard/src/airflow/providers/standard/api/client.py b/providers/standard/src/airflow/providers/standard/api/client.py deleted file mode 100644 index fae8aacfa61cc..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api/client.py +++ /dev/null @@ -1,97 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import uuid -from collections.abc import MutableMapping -from typing import TYPE_CHECKING - -from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import ( - HITLResponseContentDetail, -) -from airflow.providers.standard.execution_time.comms import ( - CreateHITLResponsePayload, - HITLInputRequestResponseResult, - UpdateHITLResponse, -) - -if TYPE_CHECKING: - from airflow.sdk.api.client import Client - - -class HITLOperations: - """ - Operations related to Human in the loop. Require Airflow 3.1+. - - :meta: private - """ - - __slots__ = ("client",) - - def __init__(self, client: Client) -> None: - self.client = client - - def add_response( - self, - *, - ti_id: uuid.UUID, - options: list[str], - subject: str, - body: str | None = None, - default: list[str] | None = None, - multiple: bool = False, - params: MutableMapping | None = None, - ) -> HITLInputRequestResponseResult: - """Add a Human-in-the-loop response that waits for human response for a specific Task Instance.""" - payload = CreateHITLResponsePayload( - ti_id=ti_id, - options=options, - subject=subject, - body=body, - default=default, - multiple=multiple, - params=params, - ) - resp = self.client.post( - f"/hitl-responses/{ti_id}", - content=payload.model_dump_json(), - ) - return HITLInputRequestResponseResult.model_validate_json(resp.read()) - - def update_response( - self, - *, - ti_id: uuid.UUID, - response_content: str, - params_input: MutableMapping | None = None, - ) -> HITLResponseContentDetail: - """Update an existing Human-in-the-loop response.""" - payload = UpdateHITLResponse( - ti_id=ti_id, - response_content=response_content, - params_input=params_input, - ) - resp = self.client.patch( - f"/hitl-responses/{ti_id}", - content=payload.model_dump_json(), - ) - return HITLResponseContentDetail.model_validate_json(resp.read()) - - def get_response_content_detail(self, ti_id: uuid.UUID) -> HITLResponseContentDetail: - """Get content part 
of a Human-in-the-loop response for a specific Task Instance.""" - resp = self.client.get(f"/hitl-responses/{ti_id}") - return HITLResponseContentDetail.model_validate_json(resp.read()) diff --git a/providers/standard/src/airflow/providers/standard/execution_time/__init__.py b/providers/standard/src/airflow/providers/standard/execution_time/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/execution_time/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/execution_time/comms.py b/providers/standard/src/airflow/providers/standard/execution_time/comms.py deleted file mode 100644 index 95e56dcb0ee51..0000000000000 --- a/providers/standard/src/airflow/providers/standard/execution_time/comms.py +++ /dev/null @@ -1,67 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from typing import Literal - -from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import ( - GetHITLResponseContentDetailPayload, - HITLInputRequestResponse, - HITLResponseContentDetail, - UpdateHITLResponsePayload, -) - - -class CreateHITLResponsePayload(HITLInputRequestResponse): - """Add the input request part of a Human-in-the-loop response.""" - - type: Literal["CreateHITLResponsePayload"] = "CreateHITLResponsePayload" - - -class HITLInputRequestResponseResult(HITLInputRequestResponse): - """Response to CreateHITLResponsePayload request.""" - - type: Literal["HITLInputRequestResponseResult"] = "HITLInputRequestResponseResult" - - -class GetHITLResponseContentDetail(GetHITLResponseContentDetailPayload): - """Get the response content part of a Human-in-the-loop response.""" - - type: Literal["GetHITLResponseContentDetail"] = "GetHITLResponseContentDetail" - - -class UpdateHITLResponse(UpdateHITLResponsePayload): - """Update the response content part of an existing Human-in-the-loop response.""" - - type: Literal["UpdateHITLResponse"] = "UpdateHITLResponse" - - -class HITLResponseContentDetailResult(HITLResponseContentDetail): - """Response to GetHITLResponseContentDetail request.""" - - type: Literal["HITLResponseContentDetailResult"] = "HITLResponseContentDetailResult" - - @classmethod - def from_api_response(cls, response: 
HITLResponseContentDetail) -> HITLResponseContentDetailResult: - """ - Create result class from API Response. - - API Response is autogenerated from the API schema, so we need to convert it to Result - for communication between the Supervisor and the task process since it needs a - discriminator field. - """ - return cls(**response.model_dump(exclude_defaults=True), type="HITLResponseContentDetailResult") diff --git a/providers/standard/src/airflow/providers/standard/get_provider_info.py b/providers/standard/src/airflow/providers/standard/get_provider_info.py index 7ff28246b299a..bd7118c78aadf 100644 --- a/providers/standard/src/airflow/providers/standard/get_provider_info.py +++ b/providers/standard/src/airflow/providers/standard/get_provider_info.py @@ -98,12 +98,6 @@ def get_provider_info(): ], } ], - "plugins": [ - { - "name": "standard_hitl", - "plugin-class": "airflow.providers.standard.plugins.hitl.HumanInTheLoopPlugin", - } - ], "extra-links": [ "airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink", "airflow.providers.standard.sensors.external_task.ExternalDagLink", diff --git a/providers/standard/src/airflow/providers/standard/migrations/README b/providers/standard/src/airflow/providers/standard/migrations/README deleted file mode 100644 index 2500aa1bcf726..0000000000000 --- a/providers/standard/src/airflow/providers/standard/migrations/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. diff --git a/providers/standard/src/airflow/providers/standard/migrations/__init__.py b/providers/standard/src/airflow/providers/standard/migrations/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/migrations/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/migrations/env.py b/providers/standard/src/airflow/providers/standard/migrations/env.py deleted file mode 100644 index 13143137e58be..0000000000000 --- a/providers/standard/src/airflow/providers/standard/migrations/env.py +++ /dev/null @@ -1,126 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import contextlib -from logging import getLogger -from logging.config import fileConfig - -from alembic import context - -from airflow import settings -from airflow.providers.standard.models.db import HITLDBManager - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -version_table = HITLDBManager.version_table_name - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if not getLogger().handlers and config.config_file_name: - fileConfig(config.config_file_name, disable_existing_loggers=False) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = HITLDBManager.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def include_object(_, name, type_, *args): - if type_ == "table" and name not in target_metadata.tables: - return False - return True - - -def run_migrations_offline(): - """ - Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - context.configure( - url=settings.SQL_ALCHEMY_CONN, - target_metadata=target_metadata, - literal_binds=True, - compare_type=True, - compare_server_default=True, - render_as_batch=True, - version_table=version_table, - include_object=include_object, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """ - Run migrations in 'online' mode. 
- - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, "autogenerate", False): - script = directives[0] - if script.upgrade_ops and script.upgrade_ops.is_empty(): - directives[:] = [] - print("No change detected in ORM schema, skipping revision.") - - with contextlib.ExitStack() as stack: - connection = config.attributes.get("connection", None) - - if not connection: - connection = stack.push(settings.engine.connect()) - - context.configure( - connection=connection, - transaction_per_migration=True, - target_metadata=target_metadata, - compare_type=True, - compare_server_default=True, - include_object=include_object, - render_as_batch=True, - process_revision_directives=process_revision_directives, - version_table=version_table, - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/providers/standard/src/airflow/providers/standard/migrations/script.py.mako b/providers/standard/src/airflow/providers/standard/migrations/script.py.mako deleted file mode 100644 index 6f890ef2b20a5..0000000000000 --- a/providers/standard/src/airflow/providers/standard/migrations/script.py.mako +++ /dev/null @@ -1,45 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} -standard_provider_verison = None - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} diff --git a/providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py b/providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py deleted file mode 100644 index dad65f4fc5da3..0000000000000 --- a/providers/standard/src/airflow/providers/standard/migrations/versions/0001_3_0_placeholder_migration.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -""" -Add hitl_response table. - -Revision ID: 5e7113ca79cc -Revises: -Create Date: 2025-06-13 17:06:38.040510 - -Note: This is a placeholder migration used to stamp the migration -when we create the migration from the ORM. Otherwise, it will run -without stamping the migration, leading to subsequent changes to -the tables not being migrated. -""" - -from __future__ import annotations - -# revision identifiers, used by Alembic. -revision: str = "5e7113ca79cc" -down_revision = None -branch_labels = None -depends_on = None -standard_provider_verison = "1.3.0" - - -def upgrade() -> None: ... - - -def downgrade() -> None: ... diff --git a/providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py b/providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/migrations/versions/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/models/db.py b/providers/standard/src/airflow/providers/standard/models/db.py deleted file mode 100644 index f3a56c6b588c4..0000000000000 --- a/providers/standard/src/airflow/providers/standard/models/db.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import logging -from pathlib import Path - -from airflow.providers.standard.models import metadata -from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS -from airflow.utils.db_manager import BaseDBManager - -PACKAGE_DIR = Path(__file__).parents[1] - -_REVISION_HEADS_MAP: dict[str, str] = { - "1.3.0": "5e7113ca79cc", -} -log = logging.getLogger(__name__) - -if not AIRFLOW_V_3_1_PLUS: - log.warning("Human in the loop functionality needs Airflow 3.1+. Skip loadding HITLDBManager.") -else: - - class HITLDBManager(BaseDBManager): - """Manages Human in the loop database.""" - - metadata = metadata - version_table_name = "alembic_version_hitl" - migration_dir = (PACKAGE_DIR / "migrations").as_posix() - alembic_file = (PACKAGE_DIR / "alembic.ini").as_posix() - supports_table_dropping = True - revision_heads_map = _REVISION_HEADS_MAP diff --git a/providers/standard/src/airflow/providers/standard/plugins/__init__.py b/providers/standard/src/airflow/providers/standard/plugins/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/plugins/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/plugins/hitl.py b/providers/standard/src/airflow/providers/standard/plugins/hitl.py deleted file mode 100644 index b801eeec39300..0000000000000 --- a/providers/standard/src/airflow/providers/standard/plugins/hitl.py +++ /dev/null @@ -1,75 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Any - -from fastapi import FastAPI - -from airflow.api_fastapi.app import create_auth_manager -from airflow.plugins_manager import AirflowPlugin -from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS -from airflow.utils.session import NEW_SESSION, provide_session - -log = logging.getLogger(__name__) - -if TYPE_CHECKING: - from sqlalchemy.orm import Session - -if AIRFLOW_V_3_1_PLUS: - - @provide_session - def _get_api_endpoint(session: Session = NEW_SESSION) -> dict[str, Any]: - from airflow.utils.db import DBLocks, create_global_lock - - with create_global_lock(session=session, lock=DBLocks.MIGRATIONS): - engine = session.get_bind().engine - from airflow.providers.standard.models import HITLResponseModel - - HITLResponseModel.metadata.create_all(engine) - - from airflow.providers.standard.api_fastapi.core_api.routes.hitl import hitl_router - - hitl_api_app = FastAPI( - title="Airflow Human-in-the-loop API", - description=( - "This is Airflow Human-in-the-loop API - which allow human interactions." - "You can find more information in AIP-90 " - "https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-90+Human+in+the+loop" - ), - ) - hitl_api_app.include_router(hitl_router) - am = create_auth_manager() - am.init() - hitl_api_app.state.auth_manager = am - - return { - "app": hitl_api_app, - "url_prefix": "/hitl-responses", - "name": "Airflow Human in the loop API", - } - - -class HumanInTheLoopPlugin(AirflowPlugin): - """Human in the loop plugin for Airflow.""" - - name = "standard_hitl" - if AIRFLOW_V_3_1_PLUS: - fastapi_apps = [_get_api_endpoint()] - else: - log.warning("Human in the loop functionality needs Airflow 3.1+. 
Skip loadding HITLDBManager.") diff --git a/providers/standard/tests/unit/standard/api/__init__.py b/providers/standard/tests/unit/standard/api/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api/test_client.py b/providers/standard/tests/unit/standard/api/test_client.py deleted file mode 100644 index 527b6d5c2cf3a..0000000000000 --- a/providers/standard/tests/unit/standard/api/test_client.py +++ /dev/null @@ -1,136 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from datetime import datetime -from typing import TYPE_CHECKING - -import httpx -from uuid6 import uuid7 - -from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail -from airflow.providers.standard.execution_time.comms import HITLInputRequestResponseResult -from airflow.sdk.api.client import Client -from airflow.utils import timezone - -if TYPE_CHECKING: - from time_machine import TimeMachineFixture - - -def make_client(transport: httpx.MockTransport) -> Client: - """Get a client with a custom transport.""" - return Client(base_url="test://server", token="", transport=transport) - - -class TestHITLOperations: - def test_add_response(self) -> None: - ti_id = uuid7() - - def handle_request(request: httpx.Request) -> httpx.Response: - if request.url.path in (f"/hitl-responses/{ti_id}"): - return httpx.Response( - status_code=201, - json={ - "ti_id": str(ti_id), - "options": ["Approval", "Reject"], - "subject": "This is subject", - "body": "This is body", - "default": ["Approval"], - "params": None, - "multiple": False, - }, - ) - return httpx.Response(status_code=400, json={"detail": "Bad Request"}) - - client = make_client(transport=httpx.MockTransport(handle_request)) - result = client.hitl.add_response( - ti_id=ti_id, - options=["Approval", "Reject"], - subject="This is subject", - body="This is body", - default=["Approval"], - params=None, - multiple=False, - ) - assert isinstance(result, HITLInputRequestResponseResult) - assert result.ti_id == ti_id - assert 
result.options == ["Approval", "Reject"] - assert result.subject == "This is subject" - assert result.body == "This is body" - assert result.default == ["Approval"] - assert result.params is None - assert result.multiple is False - - def test_update_response(self, time_machine: TimeMachineFixture) -> None: - time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) - ti_id = uuid7() - - def handle_request(request: httpx.Request) -> httpx.Response: - if request.url.path in (f"/hitl-responses/{ti_id}"): - return httpx.Response( - status_code=200, - json={ - "response_content": "Approval", - "params_input": None, - "user_id": "admin", - "response_received": True, - "response_at": "2025-07-03T00:00:00Z", - }, - ) - return httpx.Response(status_code=400, json={"detail": "Bad Request"}) - - client = make_client(transport=httpx.MockTransport(handle_request)) - result = client.hitl.update_response( - ti_id=ti_id, - response_content="Approve", - params_input=None, - ) - assert isinstance(result, HITLResponseContentDetail) - assert result.response_received is True - assert result.response_content == "Approval" - assert result.params_input is None - assert result.user_id == "admin" - assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) - - def test_get_response_content_detail(self, time_machine: TimeMachineFixture) -> None: - time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) - ti_id = uuid7() - - def handle_request(request: httpx.Request) -> httpx.Response: - if request.url.path in (f"/hitl-responses/{ti_id}"): - return httpx.Response( - status_code=200, - json={ - "response_content": "Approval", - "params_input": None, - "user_id": "admin", - "response_received": True, - "response_at": "2025-07-03T00:00:00Z", - }, - ) - return httpx.Response(status_code=400, json={"detail": "Bad Request"}) - - client = make_client(transport=httpx.MockTransport(handle_request)) - result = client.hitl.get_response_content_detail( - ti_id=ti_id, - ) - assert isinstance(result, 
HITLResponseContentDetail) - assert result.response_received is True - assert result.response_content == "Approval" - assert result.params_input is None - assert result.user_id == "admin" - assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/core_api/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/core_api/datamodels/test_hitl.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/core_api/routes/test_hitl.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/execution_api/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/execution_api/datamodels/test_hitl.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
diff --git a/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py b/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/execution_api/routes/test_hitl.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/execution_time/__init__.py b/providers/standard/tests/unit/standard/execution_time/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/execution_time/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/execution_time/test_comms.py b/providers/standard/tests/unit/standard/execution_time/test_comms.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/execution_time/test_comms.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/execution_time/test_hitl.py b/providers/standard/tests/unit/standard/execution_time/test_hitl.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/execution_time/test_hitl.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/migrations/__init__.py b/providers/standard/tests/unit/standard/migrations/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/migrations/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
diff --git a/providers/standard/tests/unit/standard/migrations/test_env.py b/providers/standard/tests/unit/standard/migrations/test_env.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/migrations/test_env.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/models/__init__.py b/providers/standard/tests/unit/standard/models/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/models/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/models/test_db.py b/providers/standard/tests/unit/standard/models/test_db.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/models/test_db.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/plugins/__init__.py b/providers/standard/tests/unit/standard/plugins/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/plugins/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/tests/unit/standard/plugins/test_hitl.py b/providers/standard/tests/unit/standard/plugins/test_hitl.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/plugins/test_hitl.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/pyproject.toml b/pyproject.toml index 3d6cbf2ee5bed..a0b4af0e9a9bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -726,7 +726,6 @@ testing = ["dev", "providers.tests", "tests_common", "tests", "system", "unit", # Annotated is central for FastAPI dependency injection, skipping rules for FastAPI folders. 
"airflow-core/src/airflow/api_fastapi/*" = ["TC001", "TC002"] "airflow-core/tests/unit/api_fastapi/*" = ["T001", "TC002"] -"providers/standard/src/airflow/providers/standard/api_fastapi/*" = ["TC001", "TC002"] # Ignore pydoc style from these "*.pyi" = ["D"] diff --git a/task-sdk/src/airflow/sdk/api/client.py b/task-sdk/src/airflow/sdk/api/client.py index 47b09a8d2a89c..45d35bac0ff9e 100644 --- a/task-sdk/src/airflow/sdk/api/client.py +++ b/task-sdk/src/airflow/sdk/api/client.py @@ -17,7 +17,6 @@ from __future__ import annotations -import contextlib import logging import sys import uuid @@ -39,8 +38,10 @@ AssetEventsResponse, AssetResponse, ConnectionResponse, + CreateHITLResponsePayload, DagRunStateResponse, DagRunType, + HITLResponseContentDetail, InactiveAssetsResponse, PrevSuccessfulDagRunResponse, TaskInstanceState, @@ -56,6 +57,7 @@ TISuccessStatePayload, TITerminalStatePayload, TriggerDAGRunPayload, + UpdateHITLResponse, ValidationError as RemoteValidationError, VariablePostBody, VariableResponse, @@ -67,6 +69,7 @@ from airflow.sdk.execution_time.comms import ( DRCount, ErrorResponse, + HITLInputRequestResponseResult, OKResponse, SkipDownstreamTasks, TaskRescheduleStartDate, @@ -619,6 +622,70 @@ def get_count( return DRCount(count=resp.json()) +class HITLOperations: + """ + Operations related to Human in the loop. Require Airflow 3.1+. 
+ + :meta: private + """ + + __slots__ = ("client",) + + def __init__(self, client: Client) -> None: + self.client = client + + def add_response( + self, + *, + ti_id: uuid.UUID, + options: list[str], + subject: str, + body: str | None = None, + default: list[str] | None = None, + multiple: bool = False, + params: dict[str, Any] | None = None, + ) -> HITLInputRequestResponseResult: + """Add a Human-in-the-loop response that waits for human response for a specific Task Instance.""" + payload = CreateHITLResponsePayload( + ti_id=ti_id, + options=options, + subject=subject, + body=body, + default=default, + multiple=multiple, + params=params, + ) + resp = self.client.post( + f"/hitl-responses/{ti_id}", + content=payload.model_dump_json(), + ) + return HITLInputRequestResponseResult.model_validate_json(resp.read()) + + def update_response( + self, + *, + ti_id: uuid.UUID, + response_content: str, + params_input: dict[str, Any] | None = None, + ) -> HITLResponseContentDetail: + """Update an existing Human-in-the-loop response.""" + payload = UpdateHITLResponse( + ti_id=ti_id, + response_content=response_content, + params_input=params_input, + ) + resp = self.client.patch( + f"/hitl-responses/{ti_id}", + content=payload.model_dump_json(), + ) + return HITLResponseContentDetail.model_validate_json(resp.read()) + + def get_response_content_detail(self, ti_id: uuid.UUID) -> HITLResponseContentDetail: + """Get content part of a Human-in-the-loop response for a specific Task Instance.""" + resp = self.client.get(f"/hitl-responses/{ti_id}") + return HITLResponseContentDetail.model_validate_json(resp.read()) + + class BearerAuth(httpx.Auth): def __init__(self, token: str): self.token: str = token @@ -752,16 +819,11 @@ def asset_events(self) -> AssetEventOperations: """Operations related to Asset Events.""" return AssetEventOperations(self) - # TODO: Remove this block once we can make the execution API pluggable. 
- with contextlib.suppress(ModuleNotFoundError): - - @lru_cache() # type: ignore[misc] - @property - def hitl(self): - from airflow.providers.standard.api.client import HITLOperations - - """Operations related to HITL Responses.""" - return HITLOperations(self) + @lru_cache() # type: ignore[misc] + @property + def hitl(self): + """Operations related to HITL Responses.""" + return HITLOperations(self) # This is only used for parsing. ServerResponseError is raised instead diff --git a/task-sdk/src/airflow/sdk/execution_time/comms.py b/task-sdk/src/airflow/sdk/execution_time/comms.py index 00c6d278381f6..f2ece3997151d 100644 --- a/task-sdk/src/airflow/sdk/execution_time/comms.py +++ b/task-sdk/src/airflow/sdk/execution_time/comms.py @@ -63,6 +63,10 @@ from fastapi import Body from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, JsonValue, TypeAdapter, field_serializer +from airflow.api_fastapi.execution_api.datamodels.hitl import ( + GetHITLResponseContentDetailPayload, + UpdateHITLResponsePayload, +) from airflow.sdk.api.datamodels._generated import ( AssetEventDagRunReference, AssetEventResponse, @@ -71,6 +75,7 @@ BundleInfo, ConnectionResponse, DagRunStateResponse, + HITLInputRequestResponse, InactiveAssetsResponse, PrevSuccessfulDagRunResponse, TaskInstance, @@ -96,20 +101,6 @@ # Available on Unix and Windows (so "everywhere") but lets be safe recv_fds = None # type: ignore[assignment] -# TODO: Remove this block once we can make the execution API pluggable. 
-try: - from airflow.providers.standard.execution_time.comms import ( - CreateHITLResponsePayload, - GetHITLResponseContentDetail, - HITLInputRequestResponseResult, - UpdateHITLResponse, - ) -except ModuleNotFoundError: - GetHITLResponseContentDetail = object # type: ignore[misc, assignment] - CreateHITLResponsePayload = object # type: ignore[misc, assignment] - HITLInputRequestResponseResult = object # type: ignore[misc, assignment] - UpdateHITLResponse = object # type: ignore[misc, assignment] - if TYPE_CHECKING: from structlog.typing import FilteringBoundLogger as Logger @@ -572,6 +563,18 @@ class SentFDs(BaseModel): fds: list[int] +class CreateHITLResponsePayload(HITLInputRequestResponse): + """Add the input request part of a Human-in-the-loop response.""" + + type: Literal["CreateHITLResponsePayload"] = "CreateHITLResponsePayload" + + +class HITLInputRequestResponseResult(HITLInputRequestResponse): + """Response to CreateHITLResponsePayload request.""" + + type: Literal["HITLInputRequestResponseResult"] = "HITLInputRequestResponseResult" + + ToTask = Annotated[ AssetResult | AssetEventsResult @@ -855,6 +858,18 @@ class GetDRCount(BaseModel): type: Literal["GetDRCount"] = "GetDRCount" +class GetHITLResponseContentDetail(GetHITLResponseContentDetailPayload): + """Get the response content part of a Human-in-the-loop response.""" + + type: Literal["GetHITLResponseContentDetail"] = "GetHITLResponseContentDetail" + + +class UpdateHITLResponse(UpdateHITLResponsePayload): + """Update the response content part of an existing Human-in-the-loop response.""" + + type: Literal["UpdateHITLResponse"] = "UpdateHITLResponse" + + ToSupervisor = Annotated[ DeferTask | DeleteXCom @@ -886,7 +901,6 @@ class GetDRCount(BaseModel): | TriggerDagRun | DeleteVariable | ResendLoggingFD - # HITL response from standard provider | CreateHITLResponsePayload | UpdateHITLResponse | GetHITLResponseContentDetail, diff --git 
a/providers/standard/src/airflow/providers/standard/execution_time/hitl.py b/task-sdk/src/airflow/sdk/execution_time/hitl.py similarity index 89% rename from providers/standard/src/airflow/providers/standard/execution_time/hitl.py rename to task-sdk/src/airflow/sdk/execution_time/hitl.py index f7c45cf2ee2d1..9fb2184f29cfd 100644 --- a/providers/standard/src/airflow/providers/standard/execution_time/hitl.py +++ b/task-sdk/src/airflow/sdk/execution_time/hitl.py @@ -14,30 +14,31 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + from __future__ import annotations from collections.abc import MutableMapping -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from uuid import UUID -from airflow.providers.standard.execution_time.comms import ( +from airflow.sdk.execution_time.comms import ( CreateHITLResponsePayload, GetHITLResponseContentDetail, UpdateHITLResponse, ) if TYPE_CHECKING: - from airflow.providers.standard.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail + from airflow.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail -def add_hitl_input_request( +def add_hitl_response( ti_id: UUID, options: list[str], subject: str, body: str | None = None, default: list[str] | None = None, multiple: bool = False, - params: MutableMapping | None = None, + params: dict[str, Any] | None = None, ) -> None: from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS diff --git a/task-sdk/src/airflow/sdk/execution_time/supervisor.py b/task-sdk/src/airflow/sdk/execution_time/supervisor.py index 1451cbfe40505..1c466c9f9a8f5 100644 --- a/task-sdk/src/airflow/sdk/execution_time/supervisor.py +++ b/task-sdk/src/airflow/sdk/execution_time/supervisor.py @@ -20,7 +20,6 @@ from __future__ import annotations import atexit -import contextlib import io import logging import os @@ -69,6 +68,7 @@ AssetEventsResult, 
AssetResult, ConnectionResult, + CreateHITLResponsePayload, DagRunStateResult, DeferTask, DeleteVariable, @@ -1231,24 +1231,18 @@ def _handle_request(self, msg: ToSupervisor, log: FilteringBoundLogger, req_id: self._send_new_log_fd(req_id) # Since we've sent the message, return. Nothing else in this ifelse/switch should return directly return + elif isinstance(msg, CreateHITLResponsePayload): + resp = self.client.hitl.add_response( + ti_id=msg.ti_id, + options=msg.options, + subject=msg.subject, + body=msg.body, + default=msg.default, + params=msg.params, + multiple=msg.multiple, + ) + self.send_msg(resp, request_id=req_id, error=None, **dump_opts) else: - # TODO: Remove this block once we can make the execution API pluggable. - with contextlib.suppress(ModuleNotFoundError): - from airflow.providers.standard.execution_time.comms import CreateHITLResponsePayload - - if isinstance(msg, CreateHITLResponsePayload): - resp = self.client.hitl.add_response( - ti_id=msg.ti_id, - options=msg.options, - subject=msg.subject, - body=msg.body, - default=msg.default, - params=msg.params, - multiple=msg.multiple, - ) - self.send_msg(resp, request_id=req_id, error=None, **dump_opts) - return - log.error("Unhandled request", msg=msg) self.send_msg( None, From e356520f73cac2ec5e0a5087fd47ab6124a5d5bc Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Fri, 4 Jul 2025 19:48:02 +0800 Subject: [PATCH 03/30] feat(hitl): move models --- .../api_fastapi/execution_api/routes/hitl.py | 2 +- .../src/airflow/models/hitl.py | 2 +- .../standard/api_fastapi/core_api/routes/hitl.py | 1 - .../tests/unit/standard/api_fastapi/__init__.py | 16 ---------------- 4 files changed, 2 insertions(+), 19 deletions(-) rename providers/standard/src/airflow/providers/standard/models/__init__.py => airflow-core/src/airflow/models/hitl.py (98%) delete mode 100644 providers/standard/tests/unit/standard/api_fastapi/__init__.py diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py 
b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py index 74fa3a4fa2696..e5316ec36aebd 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -28,7 +28,7 @@ HITLInputRequestResponse, HITLResponseContentDetail, ) -from airflow.providers.standard.models import HITLResponseModel +from airflow.models.hitl import HITLResponseModel from airflow.sdk.execution_time.comms import CreateHITLResponsePayload, UpdateHITLResponse router = APIRouter() diff --git a/providers/standard/src/airflow/providers/standard/models/__init__.py b/airflow-core/src/airflow/models/hitl.py similarity index 98% rename from providers/standard/src/airflow/providers/standard/models/__init__.py rename to airflow-core/src/airflow/models/hitl.py index cb558a7aee2bb..62a5d37f26657 100644 --- a/providers/standard/src/airflow/providers/standard/models/__init__.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -47,7 +47,7 @@ class HITLResponseModel(Base): nullable=False, ) - # Input Request + # User Request Detail options = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False) subject = Column(Text, nullable=False) body = Column(Text, nullable=True) diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py index 2d96915fd6075..241e155463c3c 100644 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py +++ b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py @@ -35,7 +35,6 @@ HITLResponseDetailCollection, UpdateHITLResponsePayload, ) -from airflow.providers.standard.models import HITLResponseModel from airflow.utils import timezone hitl_router = AirflowRouter(tags=["HumanInTheLoop"]) diff --git a/providers/standard/tests/unit/standard/api_fastapi/__init__.py 
b/providers/standard/tests/unit/standard/api_fastapi/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/tests/unit/standard/api_fastapi/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
From 633b74b10b5a9a46a4d51be513653faca5110f1f Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Fri, 4 Jul 2025 19:54:00 +0800 Subject: [PATCH 04/30] feat(hitl): move core_api from provider to core --- .../api_fastapi/core_api/datamodels/hitl.py | 0 .../api_fastapi/core_api/routes/public}/hitl.py | 12 ++++++------ airflow-core/src/airflow/models/hitl.py | 15 ++------------- .../api_fastapi/execution_api/__init__.py | 16 ---------------- .../execution_api/datamodels/__init__.py | 16 ---------------- .../api_fastapi/execution_api/routes/__init__.py | 16 ---------------- .../core_api/routes => models}/__init__.py | 0 7 files changed, 8 insertions(+), 67 deletions(-) rename {providers/standard/src/airflow/providers/standard => airflow-core/src/airflow}/api_fastapi/core_api/datamodels/hitl.py (100%) rename {providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes => airflow-core/src/airflow/api_fastapi/core_api/routes/public}/hitl.py (96%) delete mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py rename providers/standard/src/airflow/providers/standard/{api_fastapi/core_api/routes => models}/__init__.py (100%) diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py similarity index 100% rename from providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/hitl.py rename to airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py similarity index 96% rename from 
providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py rename to airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py index 241e155463c3c..1a3dde1cebb08 100644 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -25,19 +25,19 @@ from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.router import AirflowRouter -from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag -from airflow.models import HITLResponseModel -from airflow.models.taskinstance import TaskInstance as TI -from airflow.providers.standard.api_fastapi.core_api.datamodels.hitl import ( +from airflow.api_fastapi.core_api.datamodels.hitl import ( HITLResponseContentDetail, HITLResponseDetail, HITLResponseDetailCollection, UpdateHITLResponsePayload, ) +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag +from airflow.models.hitl import HITLResponseModel +from airflow.models.taskinstance import TaskInstance as TI from airflow.utils import timezone -hitl_router = AirflowRouter(tags=["HumanInTheLoop"]) +hitl_router = AirflowRouter(tags=["HumanInTheLoop"], prefix="/hitl-responses") log = structlog.get_logger(__name__) diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py index 62a5d37f26657..070e1151a2298 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -16,26 +16,15 @@ # under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Any - import sqlalchemy_jsonfield -from sqlalchemy import Boolean, Column, MetaData, String, Text +from sqlalchemy import Boolean, Column, String, Text from sqlalchemy.dialects import postgresql from sqlalchemy.ext.hybrid import hybrid_property -from sqlalchemy.orm import registry -from airflow.models.base import _get_schema, naming_convention +from airflow.models.base import Base from airflow.settings import json from airflow.utils.sqlalchemy import UtcDateTime -metadata = MetaData(schema=_get_schema(), naming_convention=naming_convention) -mapper_registry = registry(metadata=metadata) - -if TYPE_CHECKING: - Base = Any # type: ignore[misc] -else: - Base = mapper_registry.generate_base() - class HITLResponseModel(Base): """Human-in-the-loop received response.""" diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/datamodels/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/execution_api/routes/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/__init__.py b/providers/standard/src/airflow/providers/standard/models/__init__.py similarity index 100% rename from providers/standard/src/airflow/providers/standard/api_fastapi/core_api/routes/__init__.py rename to providers/standard/src/airflow/providers/standard/models/__init__.py From c6fd45829ec7bce0468d4a838ee4777ee50742fb Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Fri, 4 Jul 2025 23:35:26 +0800 Subject: [PATCH 05/30] feat(hitl): add migration files --- ...75_3_1_0_add_human_in_the_loop_response.py | 72 +++++++++++++++++++ airflow-core/src/airflow/models/hitl.py | 1 - 2 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py diff --git a/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py new file mode 100644 index 0000000000000..84509126789e0 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py @@ -0,0 +1,72 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add Human In the Loop Response table. + +Revision ID: 40f7c30a228b +Revises: 66a7743fe20e +Create Date: 2025-07-04 15:05:19.459197 + +""" + +from __future__ import annotations + +import sqlalchemy_jsonfield +from alembic import op +from sqlalchemy import Boolean, Column, String, Text +from sqlalchemy.dialects import postgresql + +from airflow.settings import json +from airflow.utils.sqlalchemy import UtcDateTime + +# revision identifiers, used by Alembic. 
+revision = "40f7c30a228b" +down_revision = "66a7743fe20e" +branch_labels = None +depends_on = None + +airflow_version = "3.1.0" + + +def upgrade(): + """Add Human In the Loop Response table.""" + op.create_table( + "hitl_response", + Column( + "ti_id", + String(length=36).with_variant(postgresql.UUID(), "postgresql"), + primary_key=True, + nullable=False, + ), + Column("options", sqlalchemy_jsonfield.JSONField(json=json), nullable=False), + Column("subject", Text, nullable=False), + Column("body", Text, nullable=True), + Column("default", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("multiple", Boolean, unique=False, default=False), + Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("response_at", UtcDateTime, nullable=True), + Column("user_id", String(128), nullable=True), + Column("response_content", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=True, default=None), + ) + + +def downgrade(): + """Response Human In the Loop Response table.""" + op.drop_table("hitl_response") diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py index 070e1151a2298..d1b9a0266e7bd 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -42,7 +42,6 @@ class HITLResponseModel(Base): body = Column(Text, nullable=True) default = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) multiple = Column(Boolean, unique=False, default=False) - params = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) # Response Content Detail From 78cbc3cd33ec25516e96bd2ee8bdc83307f7210d Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Sat, 5 Jul 2025 00:45:37 +0800 Subject: [PATCH 06/30] fix: fix ci issue --- airflow-core/docs/img/airflow_erd.sha256 | 2 +- airflow-core/docs/img/airflow_erd.svg | 3224 ++++++++--------- 
airflow-core/docs/migrations-ref.rst | 4 +- .../openapi/v2-rest-api-generated.yaml | 254 ++ ...7_3_1_0_add_human_in_the_loop_response.py} | 5 +- airflow-core/src/airflow/utils/db.py | 2 +- 6 files changed, 1762 insertions(+), 1729 deletions(-) rename airflow-core/src/airflow/migrations/versions/{0075_3_1_0_add_human_in_the_loop_response.py => 0077_3_1_0_add_human_in_the_loop_response.py} (97%) diff --git a/airflow-core/docs/img/airflow_erd.sha256 b/airflow-core/docs/img/airflow_erd.sha256 index c53cc36d331db..a5bf22a4f9d99 100644 --- a/airflow-core/docs/img/airflow_erd.sha256 +++ b/airflow-core/docs/img/airflow_erd.sha256 @@ -1 +1 @@ -e0de73aab81a28995b99be21dd25c8ca31c4e0f4a5a0a26df8aff412e5067fd5 \ No newline at end of file +7ac92245e16093fd93a67d14653f14d125867fe6c5e3f0ad94c44fb572e6facf \ No newline at end of file diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg index 4877d50b50e19..5565970e5573f 100644 --- a/airflow-core/docs/img/airflow_erd.svg +++ b/airflow-core/docs/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + dag_priority_parsing_request @@ -247,567 +247,511 @@ [INTEGER] NOT NULL - - -hitl_response - -hitl_response - -ti_id - - [UUID] - NOT NULL - -body - - [TEXT] - -default - - [JSON] - -multiple - - [BOOLEAN] - -options - - [JSON] - NOT NULL - -params - - [JSON] - NOT NULL - -params_input - - [JSON] - NOT NULL - -response_at - - [TIMESTAMP] - -response_content - - [JSON] - -subject - - [TEXT] - NOT NULL - -user_id - - [VARCHAR(128)] - - + slot_pool - -slot_pool - -id - - [INTEGER] - NOT NULL - -description - - [TEXT] - -include_deferred - - [BOOLEAN] - NOT NULL - -pool - - [VARCHAR(256)] - -slots - - [INTEGER] + +slot_pool + +id + + [INTEGER] + NOT NULL + +description + + [TEXT] + +include_deferred + + [BOOLEAN] + NOT NULL + +pool + + [VARCHAR(256)] + +slots + + [INTEGER] - + import_error - -import_error - -id - - [INTEGER] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -filename - - [VARCHAR(1024)] - 
-stacktrace - - [TEXT] - -timestamp - - [TIMESTAMP] + +import_error + +id + + [INTEGER] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +filename + + [VARCHAR(1024)] + +stacktrace + + [TEXT] + +timestamp + + [TIMESTAMP] - + asset_alias - -asset_alias - -id - - [INTEGER] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL + +asset_alias + +id + + [INTEGER] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL - + asset_alias_asset - -asset_alias_asset - -alias_id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL + +asset_alias_asset + +alias_id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_alias_asset_event - -asset_alias_asset_event - -alias_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +asset_alias_asset_event + +alias_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dag_schedule_asset_alias_reference - -dag_schedule_asset_alias_reference - -alias_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_alias_reference + +alias_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset_alias--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 - + asset - -asset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + 
[VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_trigger - -asset_trigger - -asset_id - - [INTEGER] - NOT NULL - -trigger_id - - [INTEGER] - NOT NULL + +asset_trigger + +asset_id + + [INTEGER] + NOT NULL + +trigger_id + + [INTEGER] + NOT NULL asset--asset_trigger - -0..N -1 + +0..N +1 - + asset_active - -asset_active - -name - - [VARCHAR(1500)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset_active + +name + + [VARCHAR(1500)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_active - -1 -1 + +1 +1 asset--asset_active - -1 -1 + +1 +1 - + dag_schedule_asset_reference - -dag_schedule_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--dag_schedule_asset_reference - -0..N -1 + +0..N +1 - + task_outlet_asset_reference - -task_outlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_outlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_outlet_asset_reference - -0..N -1 + +0..N +1 - + task_inlet_asset_reference - -task_inlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_inlet_asset_reference + 
+asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_inlet_asset_reference - -0..N -1 + +0..N +1 - + asset_dag_run_queue - -asset_dag_run_queue - -asset_id - - [INTEGER] - NOT NULL - -target_dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +asset_dag_run_queue + +asset_id + + [INTEGER] + NOT NULL + +target_dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL asset--asset_dag_run_queue - -0..N -1 + +0..N +1 - + asset_event - -asset_event - -id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +asset_event + +id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL asset_event--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dagrun_asset_event - -dagrun_asset_event - -dag_run_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +dagrun_asset_event + +dag_run_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_event--dagrun_asset_event - -0..N -1 + +0..N +1 - + trigger - -trigger - -id - - [INTEGER] - NOT NULL - -classpath - - [VARCHAR(1000)] - NOT NULL - -created_date - - [TIMESTAMP] - NOT NULL - -kwargs - - [TEXT] - NOT NULL - -triggerer_id - - [INTEGER] + +trigger + +id + + [INTEGER] + NOT NULL + +classpath + + [VARCHAR(1000)] + NOT NULL + +created_date + + [TIMESTAMP] + NOT NULL + +kwargs + + [TEXT] + NOT NULL + +triggerer_id + + [INTEGER] trigger--asset_trigger - -0..N -1 + 
+0..N +1 - + task_instance -<<<<<<< HEAD task_instance @@ -967,1529 +911,1363 @@ updated_at [TIMESTAMP] -======= - -task_instance - -id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -last_heartbeat_at - - [TIMESTAMP] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] ->>>>>>> fc78b17140f (feat: AIP-90 PoC) trigger--task_instance - -0..N -{0,1} + +0..N +{0,1} - + task_map - -task_map - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -keys - - [JSONB] - -length - - [INTEGER] - NOT NULL + +task_map + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +keys + + [JSONB] + +length + + [INTEGER] + NOT NULL task_instance--task_map - -0..N -1 + +0..N +1 
task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 - + task_reschedule - -task_reschedule - -id - - [INTEGER] - NOT NULL - -duration - - [INTEGER] - NOT NULL - -end_date - - [TIMESTAMP] - NOT NULL - -reschedule_date - - [TIMESTAMP] - NOT NULL - -start_date - - [TIMESTAMP] - NOT NULL - -ti_id - - [UUID] - NOT NULL + +task_reschedule + +id + + [INTEGER] + NOT NULL + +duration + + [INTEGER] + NOT NULL + +end_date + + [TIMESTAMP] + NOT NULL + +reschedule_date + + [TIMESTAMP] + NOT NULL + +start_date + + [TIMESTAMP] + NOT NULL + +ti_id + + [UUID] + NOT NULL task_instance--task_reschedule - -0..N -1 + +0..N +1 - + xcom - -xcom - -dag_run_id - - [INTEGER] - NOT NULL - -key - - [VARCHAR(512)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL - -value - - [JSONB] + +xcom + +dag_run_id + + [INTEGER] + NOT NULL + +key + + [VARCHAR(512)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL + +value + + [JSONB] task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 - + task_instance_note - -task_instance_note - -ti_id - - [UUID] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +task_instance_note + +ti_id + + [UUID] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] task_instance--task_instance_note - -1 -1 + +1 +1 - + task_instance_history - -task_instance_history 
- -task_instance_id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +task_instance_id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + 
+queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 - + rendered_task_instance_fields - -rendered_task_instance_fields - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -k8s_pod_yaml - - [JSON] - -rendered_fields - - [JSON] - NOT NULL + +rendered_task_instance_fields + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +k8s_pod_yaml + + [JSON] + +rendered_fields + + [JSON] + NOT NULL task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 - + dag_bundle - -dag_bundle - -name - - [VARCHAR(250)] - NOT NULL - -active - - [BOOLEAN] - -last_refreshed - - [TIMESTAMP] - -version - - [VARCHAR(200)] + +dag_bundle + +name + + [VARCHAR(250)] + NOT NULL + +active + + [BOOLEAN] + +last_refreshed + + [TIMESTAMP] + +version + + [VARCHAR(200)] - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -asset_expression - - [JSON] - -bundle_name - - [VARCHAR(250)] - -bundle_version - - 
[VARCHAR(200)] - -dag_display_name - - [VARCHAR(2000)] - -deadline - - [JSON] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] - NOT NULL - -is_paused - - [BOOLEAN] - -is_stale - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - [TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -relative_fileloc - - [VARCHAR(2000)] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +asset_expression + + [JSON] + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(200)] + +dag_display_name + + [VARCHAR(2000)] + +deadline + + [JSON] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_paused + + [BOOLEAN] + +is_stale + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +relative_fileloc + + [VARCHAR(2000)] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] dag_bundle--dag - -0..N -{0,1} + +0..N +{0,1} dag--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 dag--dag_schedule_asset_reference - -0..N -1 + +0..N +1 dag--task_outlet_asset_reference - -0..N -1 + +0..N +1 dag--task_inlet_asset_reference 
- -0..N -1 + +0..N +1 dag--asset_dag_run_queue - -0..N -1 + +0..N +1 - + dag_schedule_asset_name_reference - -dag_schedule_asset_name_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_name_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_name_reference - -0..N -1 + +0..N +1 - + dag_schedule_asset_uri_reference - -dag_schedule_asset_uri_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_uri_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_uri_reference - -0..N -1 + +0..N +1 - + dag_version - -dag_version - -id - - [UUID] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(250)] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -version_number - - [INTEGER] - NOT NULL + +dag_version + +id + + [UUID] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(250)] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +version_number + + [INTEGER] + NOT NULL dag--dag_version - -0..N -1 + +0..N +1 - + dag_tag - -dag_tag - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +dag_tag + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes - -dag_owner_attributes - -dag_id - - [VARCHAR(250)] - NOT NULL - -owner - - [VARCHAR(500)] - NOT NULL - -link - - [VARCHAR(500)] - NOT NULL + +dag_owner_attributes + +dag_id + + [VARCHAR(250)] + NOT NULL + +owner + + [VARCHAR(500)] + NOT NULL 
+ +link + + [VARCHAR(500)] + NOT NULL dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning - -dag_warning - -dag_id - - [VARCHAR(250)] - NOT NULL - -warning_type - - [VARCHAR(50)] - NOT NULL - -message - - [TEXT] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL + +dag_warning + +dag_id + + [VARCHAR(250)] + NOT NULL + +warning_type + + [VARCHAR(50)] + NOT NULL + +message + + [TEXT] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL dag--dag_warning - -0..N -1 + +0..N +1 - + dag_favorite - -dag_favorite - -dag_id - - [VARCHAR(250)] - NOT NULL - -user_id - - [VARCHAR(250)] - NOT NULL + +dag_favorite + +dag_id + + [VARCHAR(250)] + NOT NULL + +user_id + + [VARCHAR(250)] + NOT NULL dag--dag_favorite - -0..N -1 + +0..N +1 - + deadline - -deadline - -id - - [UUID] - NOT NULL - -callback - - [VARCHAR(500)] - NOT NULL - -callback_kwargs - - [JSON] - -dag_id - - [VARCHAR(250)] - -dagrun_id - - [INTEGER] - -deadline_time - - [TIMESTAMP] - NOT NULL + +deadline + +id + + [UUID] + NOT NULL + +callback + + [VARCHAR(500)] + NOT NULL + +callback_kwargs + + [JSON] + +dag_id + + [VARCHAR(250)] + +dagrun_id + + [INTEGER] + +deadline_time + + [TIMESTAMP] + NOT NULL dag--deadline - -0..N -{0,1} + +0..N +{0,1} dag_version--task_instance -<<<<<<< HEAD 0..N 1 -======= - -0..N -{0,1} ->>>>>>> fc78b17140f (feat: AIP-90 PoC) - + dag_run - -dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - -bundle_version - - [VARCHAR(250)] - -clear_number - - [INTEGER] - NOT NULL - -conf - - [JSONB] - -context_carrier - - [JSONB] - -created_dag_version_id - - [UUID] - -creating_job_id - - [INTEGER] - -dag_id - - [VARCHAR(250)] - NOT NULL - -data_interval_end - - [TIMESTAMP] - -data_interval_start - - [TIMESTAMP] - -end_date - - [TIMESTAMP] - -last_scheduling_decision - - [TIMESTAMP] - -log_template_id - - [INTEGER] - -logical_date - - [TIMESTAMP] - -queued_at - - [TIMESTAMP] - -run_after - - [TIMESTAMP] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -run_type - - 
[VARCHAR(50)] - NOT NULL - -scheduled_by_job_id - - [INTEGER] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(50)] - -triggered_by - - [VARCHAR(50)] - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] + +dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + +bundle_version + + [VARCHAR(250)] + +clear_number + + [INTEGER] + NOT NULL + +conf + + [JSONB] + +context_carrier + + [JSONB] + +created_dag_version_id + + [UUID] + +creating_job_id + + [INTEGER] + +dag_id + + [VARCHAR(250)] + NOT NULL + +data_interval_end + + [TIMESTAMP] + +data_interval_start + + [TIMESTAMP] + +end_date + + [TIMESTAMP] + +last_scheduling_decision + + [TIMESTAMP] + +log_template_id + + [INTEGER] + +logical_date + + [TIMESTAMP] + +queued_at + + [TIMESTAMP] + +run_after + + [TIMESTAMP] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +run_type + + [VARCHAR(50)] + NOT NULL + +scheduled_by_job_id + + [INTEGER] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(50)] + +triggered_by + + [VARCHAR(50)] + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] dag_version--dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_code - -dag_code - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -fileloc - - [VARCHAR(2000)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -source_code - - [TEXT] - NOT NULL - -source_code_hash - - [VARCHAR(32)] - NOT NULL + +dag_code + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +fileloc + + [VARCHAR(2000)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +source_code + + [TEXT] + NOT NULL + +source_code_hash + + [VARCHAR(32)] + NOT NULL dag_version--dag_code - -0..N -1 + +0..N +1 - + serialized_dag - -serialized_dag 
- -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_hash - - [VARCHAR(32)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -data - - [JSON] - -data_compressed - - [BYTEA] - -last_updated - - [TIMESTAMP] - NOT NULL + +serialized_dag + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_hash + + [VARCHAR(32)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +data + + [JSON] + +data_compressed + + [BYTEA] + +last_updated + + [TIMESTAMP] + NOT NULL dag_version--serialized_dag - -0..N -1 + +0..N +1 dag_run--dagrun_asset_event - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--deadline - -0..N -{0,1} + +0..N +{0,1} - + backfill_dag_run - -backfill_dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - NOT NULL - -dag_run_id - - [INTEGER] - -exception_reason - - [VARCHAR(250)] - -logical_date - - [TIMESTAMP] - NOT NULL - -sort_ordinal - - [INTEGER] - NOT NULL + +backfill_dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + NOT NULL + +dag_run_id + + [INTEGER] + +exception_reason + + [VARCHAR(250)] + +logical_date + + [TIMESTAMP] + NOT NULL + +sort_ordinal + + [INTEGER] + NOT NULL dag_run--backfill_dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_run_note - -dag_run_note - -dag_run_id - - [INTEGER] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +dag_run_note + +dag_run_id + + [INTEGER] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] dag_run--dag_run_note - -1 -1 + +1 +1 - + log_template - -log_template - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -elasticsearch_id - - [TEXT] - NOT NULL - -filename - - 
[TEXT] - NOT NULL + +log_template + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +elasticsearch_id + + [TEXT] + NOT NULL + +filename + + [TEXT] + NOT NULL log_template--dag_run - -0..N -{0,1} + +0..N +{0,1} - + backfill - -backfill - -id - - [INTEGER] - NOT NULL - -completed_at - - [TIMESTAMP] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_run_conf - - [JSON] - NOT NULL - -from_date - - [TIMESTAMP] - NOT NULL - -is_paused - - [BOOLEAN] - -max_active_runs - - [INTEGER] - NOT NULL - -reprocess_behavior - - [VARCHAR(250)] - NOT NULL - -to_date - - [TIMESTAMP] - NOT NULL - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] - NOT NULL + +backfill + +id + + [INTEGER] + NOT NULL + +completed_at + + [TIMESTAMP] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_run_conf + + [JSON] + NOT NULL + +from_date + + [TIMESTAMP] + NOT NULL + +is_paused + + [BOOLEAN] + +max_active_runs + + [INTEGER] + NOT NULL + +reprocess_behavior + + [VARCHAR(250)] + NOT NULL + +to_date + + [TIMESTAMP] + NOT NULL + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] + NOT NULL backfill--dag_run - -0..N -{0,1} + +0..N +{0,1} backfill--backfill_dag_run - -0..N -1 + +0..N +1 - + alembic_version - -alembic_version - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/airflow-core/docs/migrations-ref.rst b/airflow-core/docs/migrations-ref.rst index 0a2d4ea6a89f4..8037d10ed06e9 100644 --- a/airflow-core/docs/migrations-ref.rst +++ b/airflow-core/docs/migrations-ref.rst @@ -39,7 +39,9 @@ Here's the list of all the Database Migrations that are executed via when you ru +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | Revision ID | Revises ID | Airflow Version | Description | 
+=========================+==================+===================+==============================================================+ -| ``5d3072c51bac`` (head) | ``ffdb0566c7c0`` | ``3.1.0`` | Make dag_version_id non-nullable in TaskInstance. | +| ``40f7c30a228b`` (head) | ``5d3072c51bac`` | ``3.1.0`` | Add Human In the Loop Response table. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``5d3072c51bac`` | ``ffdb0566c7c0`` | ``3.1.0`` | Make dag_version_id non-nullable in TaskInstance. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``ffdb0566c7c0`` | ``66a7743fe20e`` | ``3.1.0`` | Add dag_favorite table. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index 073e2c9c8a38e..b0a3ff994380f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -7122,6 +7122,147 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitl-responses/{task_instance_id}: + patch: + tags: + - HumanInTheLoop + summary: Update Hitl Response + description: Update a Human-in-the-loop response. 
+ operationId: update_hitl_response + security: + - OAuth2PasswordBearer: [] + parameters: + - name: task_instance_id + in: path + required: true + schema: + type: string + format: uuid + title: Task Instance Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateHITLResponsePayload' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLResponseContentDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - HumanInTheLoop + summary: Get Hitl Response + description: Get a Human-in-the-loop Response of a specific task instance. 
+ operationId: get_hitl_response + security: + - OAuth2PasswordBearer: [] + parameters: + - name: task_instance_id + in: path + required: true + schema: + type: string + format: uuid + title: Task Instance Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLResponseDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitl-responses/: + get: + tags: + - HumanInTheLoop + summary: Get Hitl Responses + description: Get Human-in-the-loop Responses. + operationId: get_hitl_responses + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLResponseDetailCollection' + '401': + description: Unauthorized + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + security: + - OAuth2PasswordBearer: [] /api/v2/monitor/health: get: tags: @@ -9591,6 +9732,109 @@ components: - name title: FastAPIRootMiddlewareResponse description: Serializer for Plugin FastAPI root middleware responses. 
+ HITLResponseContentDetail: + properties: + response_content: + type: string + title: Response Content + response_at: + type: string + format: date-time + title: Response At + user_id: + type: string + title: User Id + type: object + required: + - response_content + - response_at + - user_id + title: HITLResponseContentDetail + description: Response of updating a Human-in-the-loop response. + HITLResponseDetail: + properties: + ti_id: + type: string + title: Ti Id + options: + items: + type: string + type: array + title: Options + subject: + type: string + title: Subject + body: + anyOf: + - type: string + - type: 'null' + title: Body + default: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Default + multiple: + type: boolean + title: Multiple + default: false + params: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Params + response_at: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Response At + user_id: + anyOf: + - type: string + - type: 'null' + title: User Id + response_content: + anyOf: + - type: string + - type: 'null' + title: Response Content + params_input: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Params Input + response_received: + type: boolean + title: Response Received + default: false + type: object + required: + - ti_id + - options + - subject + title: HITLResponseDetail + description: Schema for Human-in-the-loop response. + HITLResponseDetailCollection: + properties: + hitl_responses: + items: + $ref: '#/components/schemas/HITLResponseDetail' + type: array + title: Hitl Responses + total_entries: + type: integer + title: Total Entries + type: object + required: + - hitl_responses + - total_entries + title: HITLResponseDetailCollection + description: Schema for a collection of Human-in-the-loop responses. 
HTTPExceptionResponse: properties: detail: @@ -11120,6 +11364,16 @@ components: - latest_triggerer_heartbeat title: TriggererInfoResponse description: Triggerer info serializer for responses. + UpdateHITLResponsePayload: + properties: + response_content: + type: string + title: Response Content + type: object + required: + - response_content + title: UpdateHITLResponsePayload + description: Schema for updating the content of a Human-in-the-loop response. ValidationError: properties: loc: diff --git a/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py similarity index 97% rename from airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py rename to airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py index 84509126789e0..742114c8b269d 100644 --- a/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_human_in_the_loop_response.py +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -20,7 +20,7 @@ Add Human In the Loop Response table. Revision ID: 40f7c30a228b -Revises: 66a7743fe20e +Revises: 5d3072c51bac Create Date: 2025-07-04 15:05:19.459197 """ @@ -37,10 +37,9 @@ # revision identifiers, used by Alembic. 
revision = "40f7c30a228b" -down_revision = "66a7743fe20e" +down_revision = "5d3072c51bac" branch_labels = None depends_on = None - airflow_version = "3.1.0" diff --git a/airflow-core/src/airflow/utils/db.py b/airflow-core/src/airflow/utils/db.py index 8666deac458d4..c99972a8b1e96 100644 --- a/airflow-core/src/airflow/utils/db.py +++ b/airflow-core/src/airflow/utils/db.py @@ -93,7 +93,7 @@ class MappedClassProtocol(Protocol): "2.10.3": "5f2621c13b39", "3.0.0": "29ce7909c52b", "3.0.3": "fe199e1abd77", - "3.1.0": "5d3072c51bac", + "3.1.0": "40f7c30a228b", } From cae9039ef6abd890de32ff1bfd78903aa641ab3a Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Sat, 5 Jul 2025 10:45:08 +0800 Subject: [PATCH 07/30] build: rebuild frontend --- .../airflow/ui/openapi-gen/queries/common.ts | 13 +- .../ui/openapi-gen/queries/ensureQueryData.ts | 20 +- .../ui/openapi-gen/queries/prefetch.ts | 20 +- .../airflow/ui/openapi-gen/queries/queries.ts | 38 +++- .../ui/openapi-gen/queries/suspense.ts | 20 +- .../ui/openapi-gen/requests/schemas.gen.ts | 172 ++++++++++++++++++ .../ui/openapi-gen/requests/services.gen.ts | 75 +++++++- .../ui/openapi-gen/requests/types.gen.ts | 139 ++++++++++++++ 8 files changed, 490 insertions(+), 7 deletions(-) diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts index 143ec4c76550c..04ca2a2012203 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseQueryResult } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, 
ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; export type AssetServiceGetAssetsDefaultResponse = Awaited>; export type AssetServiceGetAssetsQueryResult = UseQueryResult; @@ -620,6 +620,16 @@ export const UseDagVersionServiceGetDagVersionsKeyFn = ({ bundleName, bundleVers orderBy?: string; versionNumber?: number; }, queryKey?: Array) => [useDagVersionServiceGetDagVersionsKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }])]; +export type HumanInTheLoopServiceGetHitlResponseDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetHitlResponseQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetHitlResponseKey = "HumanInTheLoopServiceGetHitlResponse"; +export const UseHumanInTheLoopServiceGetHitlResponseKeyFn = ({ taskInstanceId }: { + taskInstanceId: string; +}, queryKey?: Array) => [useHumanInTheLoopServiceGetHitlResponseKey, ...(queryKey ?? 
[{ taskInstanceId }])]; +export type HumanInTheLoopServiceGetHitlResponsesDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetHitlResponsesQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetHitlResponsesKey = "HumanInTheLoopServiceGetHitlResponses"; +export const UseHumanInTheLoopServiceGetHitlResponsesKeyFn = (queryKey?: Array) => [useHumanInTheLoopServiceGetHitlResponsesKey, ...(queryKey ?? [])]; export type MonitorServiceGetHealthDefaultResponse = Awaited>; export type MonitorServiceGetHealthQueryResult = UseQueryResult; export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth"; @@ -752,6 +762,7 @@ export type PoolServiceBulkPoolsMutationResult = Awaited>; export type VariableServicePatchVariableMutationResult = Awaited>; export type VariableServiceBulkVariablesMutationResult = Awaited>; +export type HumanInTheLoopServiceUpdateHitlResponseMutationResult = Awaited>; export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited>; diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts index 1c0fc86697c2a..a5985fffe1ab0 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { type QueryClient } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, ImportErrorService, JobService, 
LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -1172,6 +1172,24 @@ export const ensureUseDagVersionServiceGetDagVersionsData = (queryClient: QueryC versionNumber?: number; }) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); /** +* Get Hitl Response +* Get a Human-in-the-loop Response of a specific task instance. +* @param data The data for the request. +* @param data.taskInstanceId +* @returns HITLResponseDetail Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlResponseData = (queryClient: QueryClient, { taskInstanceId }: { + taskInstanceId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) }); +/** +* Get Hitl Responses +* Get Human-in-the-loop Responses. 
+* @returns HITLResponseDetailCollection Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlResponsesData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(), queryFn: () => HumanInTheLoopService.getHitlResponses() }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts index d220cf4d19589..e33f4e8a274e3 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { type QueryClient } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, 
XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -1172,6 +1172,24 @@ export const prefetchUseDagVersionServiceGetDagVersions = (queryClient: QueryCli versionNumber?: number; }) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); /** +* Get Hitl Response +* Get a Human-in-the-loop Response of a specific task instance. +* @param data The data for the request. +* @param data.taskInstanceId +* @returns HITLResponseDetail Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlResponse = (queryClient: QueryClient, { taskInstanceId }: { + taskInstanceId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) }); +/** +* Get Hitl Responses +* Get Human-in-the-loop Responses. 
+* @returns HITLResponseDetailCollection Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlResponses = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(), queryFn: () => HumanInTheLoopService.getHitlResponses() }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts index 8a7ffc0525158..66c9177457fba 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts @@ -1,8 +1,8 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; -import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; +import { AssetService, AuthLinksService, 
BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, UpdateHITLResponsePayload, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; import * as Common from "./common"; /** * Get Assets @@ -1172,6 +1172,24 @@ export const useDagVersionServiceGetDagVersions = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); /** +* Get Hitl Response +* Get a Human-in-the-loop Response of a specific task instance. +* @param data The data for the request. 
+* @param data.taskInstanceId +* @returns HITLResponseDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlResponse = = unknown[]>({ taskInstanceId }: { + taskInstanceId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) as TData, ...options }); +/** +* Get Hitl Responses +* Get Human-in-the-loop Responses. +* @returns HITLResponseDetailCollection Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlResponses = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlResponses() as TData, ...options }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError @@ -1989,6 +2007,22 @@ export const useVariableServiceBulkVariables = ({ mutationFn: ({ requestBody }) => VariableService.bulkVariables({ requestBody }) as unknown as Promise, ...options }); /** +* Update Hitl Response +* Update a Human-in-the-loop response. +* @param data The data for the request. +* @param data.taskInstanceId +* @param data.requestBody +* @returns HITLResponseContentDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceUpdateHitlResponse = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody, taskInstanceId }) => HumanInTheLoopService.updateHitlResponse({ requestBody, taskInstanceId }) as unknown as Promise, ...options }); +/** * Delete Asset Queued Events * Delete queued asset events for an asset. * @param data The data for the request. 
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts index 57f12caea7517..70c0c0c6f0ffc 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -1172,6 +1172,24 @@ export const useDagVersionServiceGetDagVersionsSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, 
versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); /** +* Get Hitl Response +* Get a Human-in-the-loop Response of a specific task instance. +* @param data The data for the request. +* @param data.taskInstanceId +* @returns HITLResponseDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlResponseSuspense = = unknown[]>({ taskInstanceId }: { + taskInstanceId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) as TData, ...options }); +/** +* Get Hitl Responses +* Get Human-in-the-loop Responses. +* @returns HITLResponseDetailCollection Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlResponsesSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlResponses() as TData, ...options }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts index 5a46ac9e847bd..e83e454e0873b 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -3408,6 +3408,165 @@ export const $FastAPIRootMiddlewareResponse = { description: 'Serializer for Plugin FastAPI root middleware responses.' 
} as const; +export const $HITLResponseContentDetail = { + properties: { + response_content: { + type: 'string', + title: 'Response Content' + }, + response_at: { + type: 'string', + format: 'date-time', + title: 'Response At' + }, + user_id: { + type: 'string', + title: 'User Id' + } + }, + type: 'object', + required: ['response_content', 'response_at', 'user_id'], + title: 'HITLResponseContentDetail', + description: 'Response of updating a Human-in-the-loop response.' +} as const; + +export const $HITLResponseDetail = { + properties: { + ti_id: { + type: 'string', + title: 'Ti Id' + }, + options: { + items: { + type: 'string' + }, + type: 'array', + title: 'Options' + }, + subject: { + type: 'string', + title: 'Subject' + }, + body: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Body' + }, + default: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Default' + }, + multiple: { + type: 'boolean', + title: 'Multiple', + default: false + }, + params: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Params' + }, + response_at: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Response At' + }, + user_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'User Id' + }, + response_content: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Response Content' + }, + params_input: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Params Input' + }, + response_received: { + type: 'boolean', + title: 'Response Received', + default: false + } + }, + type: 'object', + required: ['ti_id', 'options', 'subject'], + title: 'HITLResponseDetail', + description: 'Schema for Human-in-the-loop response.' 
+} as const; + +export const $HITLResponseDetailCollection = { + properties: { + hitl_responses: { + items: { + '$ref': '#/components/schemas/HITLResponseDetail' + }, + type: 'array', + title: 'Hitl Responses' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } + }, + type: 'object', + required: ['hitl_responses', 'total_entries'], + title: 'HITLResponseDetailCollection', + description: 'Schema for a collection of Human-in-the-loop responses.' +} as const; + export const $HTTPExceptionResponse = { properties: { detail: { @@ -5696,6 +5855,19 @@ export const $TriggererInfoResponse = { description: 'Triggerer info serializer for responses.' } as const; +export const $UpdateHITLResponsePayload = { + properties: { + response_content: { + type: 'string', + title: 'Response Content' + } + }, + type: 'object', + required: ['response_content'], + title: 'UpdateHITLResponsePayload', + description: 'Schema for updating the content of a Human-in-the-loop response.' +} as const; + export const $ValidationError = { properties: { loc: { diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts index 321a708f6b37f..196b663c8c449 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts @@ -3,7 +3,7 @@ import type { CancelablePromise } from './core/CancelablePromise'; import { OpenAPI } from './core/OpenAPI'; import { request as __request } from './core/request'; -import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, 
GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, 
GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, 
GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; +import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, 
DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, 
GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlResponseData, UpdateHitlResponseResponse, GetHitlResponseData, GetHitlResponseResponse, GetHitlResponsesResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, 
GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; export class AssetService { /** @@ -3360,6 +3360,79 @@ export class DagVersionService { } +export class HumanInTheLoopService { + /** + * Update Hitl Response + * Update a Human-in-the-loop response. + * @param data The data for the request. + * @param data.taskInstanceId + * @param data.requestBody + * @returns HITLResponseContentDetail Successful Response + * @throws ApiError + */ + public static updateHitlResponse(data: UpdateHitlResponseData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/hitl-responses/{task_instance_id}', + path: { + task_instance_id: data.taskInstanceId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Hitl Response + * Get a Human-in-the-loop Response of a specific task instance. + * @param data The data for the request. + * @param data.taskInstanceId + * @returns HITLResponseDetail Successful Response + * @throws ApiError + */ + public static getHitlResponse(data: GetHitlResponseData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitl-responses/{task_instance_id}', + path: { + task_instance_id: data.taskInstanceId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Hitl Responses + * Get Human-in-the-loop Responses. 
+ * @returns HITLResponseDetailCollection Successful Response + * @throws ApiError + */ + public static getHitlResponses(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitl-responses/', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden' + } + }); + } + +} + export class MonitorService { /** * Get Health diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index ed4e32db57f84..d411efe075449 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -917,6 +917,45 @@ export type FastAPIRootMiddlewareResponse = { [key: string]: unknown | string; }; +/** + * Response of updating a Human-in-the-loop response. + */ +export type HITLResponseContentDetail = { + response_content: string; + response_at: string; + user_id: string; +}; + +/** + * Schema for Human-in-the-loop response. + */ +export type HITLResponseDetail = { + ti_id: string; + options: Array<(string)>; + subject: string; + body?: string | null; + default?: Array<(string)> | null; + multiple?: boolean; + params?: { + [key: string]: unknown; +} | null; + response_at?: string | null; + user_id?: string | null; + response_content?: string | null; + params_input?: { + [key: string]: unknown; +} | null; + response_received?: boolean; +}; + +/** + * Schema for a collection of Human-in-the-loop responses. + */ +export type HITLResponseDetailCollection = { + hitl_responses: Array; + total_entries: number; +}; + /** * HTTPException Model used for error response. */ @@ -1429,6 +1468,13 @@ export type TriggererInfoResponse = { latest_triggerer_heartbeat: string | null; }; +/** + * Schema for updating the content of a Human-in-the-loop response. 
+ */ +export type UpdateHITLResponsePayload = { + response_content: string; +}; + export type ValidationError = { loc: Array<(string | number)>; msg: string; @@ -2847,6 +2893,21 @@ export type GetDagVersionsData = { export type GetDagVersionsResponse = DAGVersionCollectionResponse; +export type UpdateHitlResponseData = { + requestBody: UpdateHITLResponsePayload; + taskInstanceId: string; +}; + +export type UpdateHitlResponseResponse = HITLResponseContentDetail; + +export type GetHitlResponseData = { + taskInstanceId: string; +}; + +export type GetHitlResponseResponse = HITLResponseDetail; + +export type GetHitlResponsesResponse = HITLResponseDetailCollection; + export type GetHealthResponse = HealthInfoResponse; export type GetVersionResponse = VersionInfo; @@ -5793,6 +5854,84 @@ export type $OpenApiTs = { }; }; }; + '/api/v2/hitl-responses/{task_instance_id}': { + patch: { + req: UpdateHitlResponseData; + res: { + /** + * Successful Response + */ + 200: HITLResponseContentDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetHitlResponseData; + res: { + /** + * Successful Response + */ + 200: HITLResponseDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + '/api/v2/hitl-responses/': { + get: { + res: { + /** + * Successful Response + */ + 200: HITLResponseDetailCollection; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + }; + }; + }; '/api/v2/monitor/health': { get: { res: { From 
ae41a3a546429a129b0a6990f77605dd8d090958 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Sat, 5 Jul 2025 10:49:17 +0800 Subject: [PATCH 08/30] fix(hitl): generate datamodel --- .../airflowctl/api/datamodels/generated.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/airflow-ctl/src/airflowctl/api/datamodels/generated.py b/airflow-ctl/src/airflowctl/api/datamodels/generated.py index c64907a938a47..f8170f2a2dee5 100644 --- a/airflow-ctl/src/airflowctl/api/datamodels/generated.py +++ b/airflow-ctl/src/airflowctl/api/datamodels/generated.py @@ -573,6 +573,44 @@ class FastAPIRootMiddlewareResponse(BaseModel): name: Annotated[str, Field(title="Name")] +class HITLResponseContentDetail(BaseModel): + """ + Response of updating a Human-in-the-loop response. + """ + + response_content: Annotated[str, Field(title="Response Content")] + response_at: Annotated[datetime, Field(title="Response At")] + user_id: Annotated[str, Field(title="User Id")] + + +class HITLResponseDetail(BaseModel): + """ + Schema for Human-in-the-loop response. + """ + + ti_id: Annotated[str, Field(title="Ti Id")] + options: Annotated[list[str], Field(title="Options")] + subject: Annotated[str, Field(title="Subject")] + body: Annotated[str | None, Field(title="Body")] = None + default: Annotated[list[str] | None, Field(title="Default")] = None + multiple: Annotated[bool | None, Field(title="Multiple")] = False + params: Annotated[dict[str, Any] | None, Field(title="Params")] = None + response_at: Annotated[datetime | None, Field(title="Response At")] = None + user_id: Annotated[str | None, Field(title="User Id")] = None + response_content: Annotated[str | None, Field(title="Response Content")] = None + params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None + response_received: Annotated[bool | None, Field(title="Response Received")] = False + + +class HITLResponseDetailCollection(BaseModel): + """ + Schema for a collection of Human-in-the-loop responses. 
+ """ + + hitl_responses: Annotated[list[HITLResponseDetail], Field(title="Hitl Responses")] + total_entries: Annotated[int, Field(title="Total Entries")] + + class HTTPExceptionResponse(BaseModel): """ HTTPException Model used for error response. @@ -899,6 +937,14 @@ class TriggererInfoResponse(BaseModel): latest_triggerer_heartbeat: Annotated[str | None, Field(title="Latest Triggerer Heartbeat")] = None +class UpdateHITLResponsePayload(BaseModel): + """ + Schema for updating the content of a Human-in-the-loop response. + """ + + response_content: Annotated[str, Field(title="Response Content")] + + class ValidationError(BaseModel): loc: Annotated[list[str | int], Field(title="Location")] msg: Annotated[str, Field(title="Message")] From 48e0eebfc828b294bc6b39be3435deff46886ab1 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Sat, 5 Jul 2025 11:41:04 +0800 Subject: [PATCH 09/30] fix(hitl): add HITL module to models --- airflow-core/src/airflow/models/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/airflow-core/src/airflow/models/__init__.py b/airflow-core/src/airflow/models/__init__.py index 3e472b70b4bca..0471ef7f40c6f 100644 --- a/airflow-core/src/airflow/models/__init__.py +++ b/airflow-core/src/airflow/models/__init__.py @@ -103,6 +103,7 @@ def __getattr__(name): "DbCallbackRequest": "airflow.models.db_callback_request", "Deadline": "airflow.models.deadline", "Log": "airflow.models.log", + "HITLResponseModel": "airflow.models.hitl", "MappedOperator": "airflow.models.mappedoperator", "Operator": "airflow.models.operator", "Param": "airflow.sdk.definitions.param", From 939d288b18556cde070beb5e828e4fefd75bc559 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Mon, 7 Jul 2025 23:29:37 +0800 Subject: [PATCH 10/30] feat(hitl): add multiple support --- .../api_fastapi/core_api/datamodels/hitl.py | 24 ++-- .../openapi/v2-rest-api-generated.yaml | 52 ++++--- .../core_api/routes/public/hitl.py | 7 +- .../execution_api/datamodels/hitl.py | 16 ++- 
.../api_fastapi/execution_api/routes/hitl.py | 4 +- ...77_3_1_0_add_human_in_the_loop_response.py | 4 +- airflow-core/src/airflow/models/hitl.py | 4 +- .../ui/openapi-gen/requests/schemas.gen.ts | 67 +++++---- .../ui/openapi-gen/requests/types.gen.ts | 24 ++-- .../airflowctl/api/datamodels/generated.py | 12 +- .../providers/standard/operators/hitl.py | 132 +++++++++--------- .../providers/standard/triggers/hitl.py | 71 ++++++---- task-sdk/src/airflow/sdk/api/client.py | 4 +- .../airflow/sdk/api/datamodels/_generated.py | 6 +- .../src/airflow/sdk/execution_time/hitl.py | 5 +- task-sdk/tests/task_sdk/api/test_client.py | 106 ++++++++++++++ 16 files changed, 346 insertions(+), 192 deletions(-) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py index 9ab104436aa89..aba82ec6ec73f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -16,10 +16,10 @@ # under the License. 
from __future__ import annotations -from collections.abc import MutableMapping +from collections.abc import Mapping from datetime import datetime -from pydantic import field_validator +from pydantic import Field, field_validator from airflow.api_fastapi.core_api.base import BaseModel @@ -27,15 +27,17 @@ class UpdateHITLResponsePayload(BaseModel): """Schema for updating the content of a Human-in-the-loop response.""" - response_content: str + response_content: list[str] + params_input: Mapping = Field(default_factory=dict) class HITLResponseContentDetail(BaseModel): """Response of updating a Human-in-the-loop response.""" - response_content: str - response_at: datetime user_id: str + response_at: datetime + response_content: list[str] + params_input: Mapping = Field(default_factory=dict) class HITLResponseDetail(BaseModel): @@ -49,22 +51,20 @@ class HITLResponseDetail(BaseModel): body: str | None = None default: list[str] | None = None multiple: bool = False - params: MutableMapping | None = None + params: Mapping = Field(default_factory=dict) # Response Content Detail - response_at: datetime | None = None user_id: str | None = None - response_content: str | None = None - params_input: MutableMapping | None = None + response_at: datetime | None = None + response_content: list[str] | None = None + params_input: Mapping = Field(default_factory=dict) response_received: bool = False @field_validator("params", mode="before") @classmethod - def get_params(cls, params: MutableMapping | None) -> dict | None: + def get_params(cls, params: Mapping) -> Mapping: """Convert params attribute to dict representation.""" - if params is None: - return None return {k: v.dump() for k, v in params.items()} diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index b0a3ff994380f..8a062183f84ea 100644 --- 
a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -9734,21 +9734,27 @@ components: description: Serializer for Plugin FastAPI root middleware responses. HITLResponseContentDetail: properties: - response_content: + user_id: type: string - title: Response Content + title: User Id response_at: type: string format: date-time title: Response At - user_id: - type: string - title: User Id + response_content: + items: + type: string + type: array + title: Response Content + params_input: + additionalProperties: true + type: object + title: Params Input type: object required: - - response_content - - response_at - user_id + - response_at + - response_content title: HITLResponseContentDetail description: Response of updating a Human-in-the-loop response. HITLResponseDetail: @@ -9781,32 +9787,30 @@ components: title: Multiple default: false params: + additionalProperties: true + type: object + title: Params + user_id: anyOf: - - additionalProperties: true - type: object + - type: string - type: 'null' - title: Params + title: User Id response_at: anyOf: - type: string format: date-time - type: 'null' title: Response At - user_id: - anyOf: - - type: string - - type: 'null' - title: User Id response_content: anyOf: - - type: string + - items: + type: string + type: array - type: 'null' title: Response Content params_input: - anyOf: - - additionalProperties: true - type: object - - type: 'null' + additionalProperties: true + type: object title: Params Input response_received: type: boolean @@ -11367,8 +11371,14 @@ components: UpdateHITLResponsePayload: properties: response_content: - type: string + items: + type: string + type: array title: Response Content + params_input: + additionalProperties: true + type: object + title: Params Input type: object required: - response_content diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py 
b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py index 1a3dde1cebb08..fc01e29e15518 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -78,9 +78,10 @@ def update_hitl_response( "and is not allowed to write again.", ) - hitl_response_model.response_content = update_hitl_response_payload.response_content hitl_response_model.user_id = user.get_id() hitl_response_model.response_at = timezone.utcnow() + hitl_response_model.response_content = update_hitl_response_payload.response_content + hitl_response_model.params_input = update_hitl_response_payload.params_input session.add(hitl_response_model) session.commit() return HITLResponseContentDetail.model_validate(hitl_response_model) @@ -95,7 +96,9 @@ def update_hitl_response( status.HTTP_409_CONFLICT, ] ), - dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], + dependencies=[ + Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), + ], ) def get_hitl_response( task_instance_id: UUID, diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py index a1e74c812a9c3..0e2eb22e0f714 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py @@ -16,10 +16,12 @@ # under the License. 
from __future__ import annotations -from collections.abc import MutableMapping from datetime import datetime +from typing import Any from uuid import UUID +from pydantic import Field + from airflow.api_fastapi.core_api.base import BaseModel @@ -32,7 +34,7 @@ class HITLInputRequestResponse(BaseModel): body: str | None = None default: list[str] | None = None multiple: bool = False - params: MutableMapping | None = None + params: dict[str, Any] = Field(default_factory=dict) class GetHITLResponseContentDetailPayload(BaseModel): @@ -45,15 +47,15 @@ class UpdateHITLResponsePayload(BaseModel): """Schema for writing a Human-in-the-loop response content detail for a specific task instance.""" ti_id: UUID - response_content: str - params_input: MutableMapping | None = None + response_content: list[str] + params_input: dict[str, Any] = Field(default_factory=dict) class HITLResponseContentDetail(BaseModel): """Schema for Human-in-the-loop response content detail for a specific task instance.""" response_received: bool - response_at: datetime | None user_id: str | None - response_content: str | None - params_input: MutableMapping | None = None + response_at: datetime | None + response_content: list[str] | None + params_input: dict[str, Any] = Field(default_factory=dict) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py index e5316ec36aebd..50b8144256e62 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -87,7 +87,7 @@ def update_hitl_response( f"Human-in-the-loop Response Content for Task Instance with id {ti_id_str} already exists.", ) - hitl_response_model.user_id = "fallback to default" + hitl_response_model.user_id = "Fallback to default" hitl_response_model.response_content = payload.response_content hitl_response_model.params_input = payload.params_input 
hitl_response_model.response_at = datetime.now(timezone.utc) @@ -98,6 +98,7 @@ def update_hitl_response( response_at=hitl_response_model.response_at, user_id=hitl_response_model.user_id, response_content=hitl_response_model.response_content, + params_input=hitl_response_model.params_input, ) @@ -119,4 +120,5 @@ def get_hitl_response( response_at=hitl_response_model.response_at, user_id=hitl_response_model.user_id, response_content=hitl_response_model.response_content, + params_input=hitl_response_model.params_input or {}, ) diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py index 742114c8b269d..7459d93b72b25 100644 --- a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -58,11 +58,11 @@ def upgrade(): Column("body", Text, nullable=True), Column("default", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), Column("multiple", Boolean, unique=False, default=False), - Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), Column("response_at", UtcDateTime, nullable=True), Column("user_id", String(128), nullable=True), Column("response_content", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), - Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=True, default=None), + Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), ) diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py index d1b9a0266e7bd..77278170a11a2 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -42,7 +42,7 @@ class HITLResponseModel(Base): body 
= Column(Text, nullable=True) default = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) multiple = Column(Boolean, unique=False, default=False) - params = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + params = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) # Response Content Detail response_at = Column(UtcDateTime, nullable=True) @@ -52,7 +52,7 @@ class HITLResponseModel(Base): nullable=True, default=None, ) - params_input = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + params_input = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) @hybrid_property def response_received(self) -> bool: diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts index e83e454e0873b..f1f3b550a7093 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -3410,22 +3410,30 @@ export const $FastAPIRootMiddlewareResponse = { export const $HITLResponseContentDetail = { properties: { - response_content: { + user_id: { type: 'string', - title: 'Response Content' + title: 'User Id' }, response_at: { type: 'string', format: 'date-time', title: 'Response At' }, - user_id: { - type: 'string', - title: 'User Id' + response_content: { + items: { + type: 'string' + }, + type: 'array', + title: 'Response Content' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' } }, type: 'object', - required: ['response_content', 'response_at', 'user_id'], + required: ['user_id', 'response_at', 'response_content'], title: 'HITLResponseContentDetail', description: 'Response of updating a Human-in-the-loop response.' 
} as const; @@ -3478,16 +3486,20 @@ export const $HITLResponseDetail = { default: false }, params: { + additionalProperties: true, + type: 'object', + title: 'Params' + }, + user_id: { anyOf: [ { - additionalProperties: true, - type: 'object' + type: 'string' }, { type: 'null' } ], - title: 'Params' + title: 'User Id' }, response_at: { anyOf: [ @@ -3501,21 +3513,13 @@ export const $HITLResponseDetail = { ], title: 'Response At' }, - user_id: { - anyOf: [ - { - type: 'string' - }, - { - type: 'null' - } - ], - title: 'User Id' - }, response_content: { anyOf: [ { - type: 'string' + items: { + type: 'string' + }, + type: 'array' }, { type: 'null' @@ -3524,15 +3528,8 @@ export const $HITLResponseDetail = { title: 'Response Content' }, params_input: { - anyOf: [ - { - additionalProperties: true, - type: 'object' - }, - { - type: 'null' - } - ], + additionalProperties: true, + type: 'object', title: 'Params Input' }, response_received: { @@ -5858,8 +5855,16 @@ export const $TriggererInfoResponse = { export const $UpdateHITLResponsePayload = { properties: { response_content: { - type: 'string', + items: { + type: 'string' + }, + type: 'array', title: 'Response Content' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' } }, type: 'object', diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index d411efe075449..a6dde68727068 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -921,9 +921,12 @@ export type FastAPIRootMiddlewareResponse = { * Response of updating a Human-in-the-loop response. 
*/ export type HITLResponseContentDetail = { - response_content: string; - response_at: string; user_id: string; + response_at: string; + response_content: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; }; /** @@ -937,14 +940,14 @@ export type HITLResponseDetail = { default?: Array<(string)> | null; multiple?: boolean; params?: { - [key: string]: unknown; -} | null; - response_at?: string | null; + [key: string]: unknown; + }; user_id?: string | null; - response_content?: string | null; + response_at?: string | null; + response_content?: Array<(string)> | null; params_input?: { - [key: string]: unknown; -} | null; + [key: string]: unknown; + }; response_received?: boolean; }; @@ -1472,7 +1475,10 @@ export type TriggererInfoResponse = { * Schema for updating the content of a Human-in-the-loop response. */ export type UpdateHITLResponsePayload = { - response_content: string; + response_content: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; }; export type ValidationError = { diff --git a/airflow-ctl/src/airflowctl/api/datamodels/generated.py b/airflow-ctl/src/airflowctl/api/datamodels/generated.py index f8170f2a2dee5..d4cdd71200d92 100644 --- a/airflow-ctl/src/airflowctl/api/datamodels/generated.py +++ b/airflow-ctl/src/airflowctl/api/datamodels/generated.py @@ -578,9 +578,10 @@ class HITLResponseContentDetail(BaseModel): Response of updating a Human-in-the-loop response. 
""" - response_content: Annotated[str, Field(title="Response Content")] - response_at: Annotated[datetime, Field(title="Response At")] user_id: Annotated[str, Field(title="User Id")] + response_at: Annotated[datetime, Field(title="Response At")] + response_content: Annotated[list[str], Field(title="Response Content")] + params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None class HITLResponseDetail(BaseModel): @@ -595,9 +596,9 @@ class HITLResponseDetail(BaseModel): default: Annotated[list[str] | None, Field(title="Default")] = None multiple: Annotated[bool | None, Field(title="Multiple")] = False params: Annotated[dict[str, Any] | None, Field(title="Params")] = None - response_at: Annotated[datetime | None, Field(title="Response At")] = None user_id: Annotated[str | None, Field(title="User Id")] = None - response_content: Annotated[str | None, Field(title="Response Content")] = None + response_at: Annotated[datetime | None, Field(title="Response At")] = None + response_content: Annotated[list[str] | None, Field(title="Response Content")] = None params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None response_received: Annotated[bool | None, Field(title="Response Received")] = False @@ -942,7 +943,8 @@ class UpdateHITLResponsePayload(BaseModel): Schema for updating the content of a Human-in-the-loop response. 
""" - response_content: Annotated[str, Field(title="Response Content")] + response_content: Annotated[list[str], Field(title="Response Content")] + params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None class ValidationError(BaseModel): diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 22c09638f101f..28e78638d6830 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -16,45 +16,43 @@ # under the License. from __future__ import annotations -import logging -from collections.abc import Sequence +from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS + +if not AIRFLOW_V_3_1_PLUS: + raise ImportError("Human in the loop functionality needs Airflow 3.1+.") + + +from collections.abc import Collection, Mapping from datetime import datetime, timezone from typing import TYPE_CHECKING, Any from airflow.models import SkipMixin from airflow.models.baseoperator import BaseOperator from airflow.providers.standard.exceptions import HITLTriggerEventError -from airflow.providers.standard.triggers.hitl import HITLTrigger -from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS +from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload from airflow.sdk.definitions.param import ParamsDict from airflow.sdk.execution_time.hitl import add_hitl_response if TYPE_CHECKING: from airflow.sdk.definitions.context import Context -log = logging.getLogger(__name__) -if not AIRFLOW_V_3_1_PLUS: - log.warning("Human in the loop functionality needs Airflow 3.1+..") - class HITLOperator(BaseOperator): """ Base class for all Human-in-the-loop Operators to inherit from. 
- :param subject: Headline/subject presented to the user for the interaction task - :param options: List of options that the human can select from and click to complete the task. - Buttons on the UI will be presented in the order of the list + :param subject: Headline/subject presented to the user for the interaction task. + :param options: List of options that a user can select from to complete the task. :param body: descriptive text that might give background, hints or can provide background or summary of - details that are needed to decide - :param default: The default result (highlighted button) and result that is taken if timeout is passed + details that are needed to decide. + :param default: The default option and the option that is taken if timeout is passed. + :param multiple: Whether the user can select one or multiple options. :param params: dictionary of parameter definitions that are in the format of Dag params such that a Form Field can be rendered. Entered data is validated (schema, required fields) like for a Dag run - and added to XCom of the task result + and added to XCom of the task result.
""" - template_fields: Sequence[str] = ("subject", "body") - - allow_arbitrary_input: bool = False + template_fields: Collection[str] = ("subject", "body") def __init__( self, @@ -63,24 +61,43 @@ def __init__( options: list[str], body: str | None = None, default: str | list[str] | None = None, + multiple: bool = False, params: ParamsDict | None = None, **kwargs, ) -> None: super().__init__(**kwargs) - self.options = options self.subject = subject self.body = body - self.params = params or {} - self.multiple = False + + self.options = options + # allow defaults to store more than one option when multiple=True self.default = [default] if isinstance(default, str) else default + self.multiple = multiple + + self.params: ParamsDict | dict = params or {} - self.validate_default() + self.validate_defaults() - def validate_default(self) -> None: + def validate_defaults(self) -> None: + """ + Validate whether the given default passes the following criteria. + + 1. When timeout is set, default options should be provided. + 2. Default options should be the subset of options. + 3. When multiple is False, there should only be one option.
+ """ if self.default is None and self.execution_timeout: raise ValueError('"default" is required when "execution_timeout" is provided.') + if self.default is not None: + if not set(self.default).issubset(self.options): + raise ValueError(f'default "{self.default}" should be a subset of options "{self.options}"') + + if self.multiple is False and len(self.default) > 1: + raise ValueError('More than one default given when "multiple" is set to False.') + def execute(self, context: Context): + """Add a Human-in-the-loop Response and then defer to HITLTrigger and wait for user input.""" ti_id = context["task_instance"].id # Write Human-in-the-loop input request to DB add_hitl_response( @@ -90,7 +107,7 @@ def execute(self, context: Context): body=self.body, default=self.default, multiple=self.multiple, - params=self.serializable_params, + params=self.serialzed_params, ) self.log.info("Waiting for response") if self.execution_timeout: @@ -103,52 +120,44 @@ def execute(self, context: Context): ti_id=ti_id, options=self.options, default=self.default, - params=self.serializable_params, + params=self.serialzed_params, multiple=self.multiple, timeout_datetime=timeout_datetime, ), method_name="execute_complete", ) + @property + def serialzed_params(self) -> dict[str, Any]: + return self.params.dump() if isinstance(self.params, ParamsDict) else self.params + def execute_complete(self, context: Context, event: dict[str, Any]) -> Any: if "error" in event: raise HITLTriggerEventError(event["error"]) response_content = event["response_content"] - params_input = event["params_input"] - if self.allow_arbitrary_input: - self.validate_response_content(response_content) + params_input = event["params_input"] or {} + self.validate_response_content(response_content) self.validate_params_input(params_input) - return { - "response_content": response_content, - "params_input": params_input, - } - - def validate_response_content(self, response_content: str | list[str]) -> None: - if 
isinstance(response_content, list): - if self.multiple is False: - raise ValueError( - f"Multiple response {response_content} received while multiple is set to False" - ) - - if diff := set(response_content) - set(self.options): - raise ValueError(f"Responses {diff} not in {self.options}") + return HITLTriggerEventSuccessPayload( + response_content=response_content, + params_input=params_input, + ) - if response_content not in self.options: - raise ValueError(f"Response {response_content} not in {self.options}") + def validate_response_content(self, response_content: list[str]) -> None: + """Check whether the user provides a valid response.""" + if diff := set(response_content) - set(self.options): + raise ValueError(f"Responses {diff} not in {self.options}") - def validate_params_input(self, params_input: dict | None) -> None: + def validate_params_input(self, params_input: Mapping) -> None: + """Check whether the user provides valid params input.""" if ( - self.serializable_params is not None + self.serialzed_params is not None and params_input is not None - and set(self.serializable_params.keys()) ^ set(params_input) + and set(self.serialzed_params.keys()) ^ set(params_input) ): raise ValueError(f"params_input {params_input} does not match params {self.params}") - @property - def serializable_params(self) -> dict[str, Any] | None: - return self.params.dump() if isinstance(self.params, ParamsDict) else self.params - class ApprovalOperator(HITLOperator): """Human-in-the-loop Operator that has only 'Approval' and 'Reject' options.""" @@ -161,7 +170,11 @@ def __init__(self, **kwargs) -> None: class HITLTerminationOperator(HITLOperator, SkipMixin): - """ShortCirquitOperator to terminate the Dag run by human decision.""" + """ + Human-in-the-loop Operator that has only 'Stop' and 'Proceed' options. + + When 'Stop' is selected by the user, the dag run terminates like ShortCircuitOperator.
+ """ def __init__(self, **kwargs) -> None: if "options" in kwargs: @@ -176,30 +189,19 @@ def execute_complete(self, context: Context, event: dict[str, Any]) -> None: class HITLBranchOperator(HITLOperator): """BranchOperator based on Human-in-the-loop Response.""" - def __init__(self, *, multiple: bool = False, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super().__init__(**kwargs) - self.multiple = multiple def execute_complete(self, context: Context, event: dict[str, Any]) -> None: raise NotImplementedError class HITLEntryOperator(HITLOperator): - """ - User can add further information with all options that a TriggerForm allows (same like Dag params). + """Human-in-the-loop Operator that is used to accept user input through TriggerForm.""" - Options and default default to ["OK"] but can be over-ridden. - """ - - def __init__( - self, - **kwargs, - ) -> None: + def __init__(self, **kwargs) -> None: if "options" not in kwargs: kwargs["options"] = ["OK"] kwargs["default"] = ["OK"] super().__init__(**kwargs) - - def execute_complete(self, context: Context, event: dict[str, Any]) -> None: - raise NotImplementedError diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py index 59638a34d0c39..f61a9e4960285 100644 --- a/providers/standard/src/airflow/providers/standard/triggers/hitl.py +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -16,25 +16,38 @@ # under the License. 
from __future__ import annotations +from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS + +if not AIRFLOW_V_3_1_PLUS: + raise ImportError("Human in the loop functionality needs Airflow 3.1+.") + import asyncio -import logging from collections.abc import AsyncIterator -from datetime import datetime, timezone -from typing import Any +from datetime import datetime +from typing import Any, TypedDict from uuid import UUID from asgiref.sync import sync_to_async -from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS from airflow.sdk.execution_time.hitl import ( get_hitl_response_content_detail, update_htil_response_content_detail, ) from airflow.triggers.base import BaseTrigger, TriggerEvent +from airflow.utils import timezone -log = logging.getLogger(__name__) -if not AIRFLOW_V_3_1_PLUS: - log.warning("Human in the loop functionality needs Airflow 3.1+..") + +class HITLTriggerEventSuccessPayload(TypedDict, total=False): + """Minimum required keys for a success Human-in-the-loop TriggerEvent.""" + + response_content: list[str] + params_input: dict[str, Any] + + +class HITLTriggerEventFailurePayload(TypedDict): + """Minimum required keys for a failed Human-in-the-loop TriggerEvent.""" + + error: str class HITLTrigger(BaseTrigger): @@ -45,8 +58,8 @@ def __init__( *, ti_id: UUID, options: list[str], - default: list[str] | None = None, params: dict[str, Any], + default: list[str] | None = None, multiple: bool = False, timeout_datetime: datetime | None, poke_interval: float = 5.0, @@ -54,12 +67,14 @@ def __init__( ): super().__init__(**kwargs) self.ti_id = ti_id + self.poke_interval = poke_interval + self.options = options - self.timeout_datetime = timeout_datetime + self.multiple = multiple self.default = default + self.timeout_datetime = timeout_datetime + self.params = params - self.multiple = multiple - self.poke_interval = poke_interval def serialize(self) -> tuple[str, dict[str, Any]]: """Serialize HITLTrigger arguments and 
classpath.""" @@ -79,35 +94,37 @@ def serialize(self) -> tuple[str, dict[str, Any]]: async def run(self) -> AsyncIterator[TriggerEvent]: """Loop until the Human-in-the-loop response received or timeout reached.""" while True: - if self.timeout_datetime and self.timeout_datetime < datetime.now(timezone.utc): + if self.timeout_datetime and self.timeout_datetime < timezone.utcnow(): + # This normally should be checked in the HITLOperator if self.default is None: yield TriggerEvent( - { - "error": 'default" is required when "execution_timeout" is provided.', - } + HITLTriggerEventFailurePayload( + error='default" is required when "execution_timeout" is provided.' + ) ) return - default_content: str = self.default[0] if isinstance(self.default, list) else self.default - resp = await sync_to_async(update_htil_response_content_detail)( - ti_id=self.ti_id, response_content=default_content + await sync_to_async(update_htil_response_content_detail)( + ti_id=self.ti_id, + response_content=self.default, + params_input=self.params, ) yield TriggerEvent( - { - "response_content": default_content, - "params_input": self.params, - } + HITLTriggerEventSuccessPayload( + response_content=self.default, + params_input=self.params, + ) ) return resp = await sync_to_async(get_hitl_response_content_detail)(ti_id=self.ti_id) - if resp.response_received: + if resp.response_received and resp.response_content: self.log.info("Responded by %s at %s", resp.user_id, resp.response_at) yield TriggerEvent( - { - "response_content": resp.response_content, - "params_input": resp.params_input, - } + HITLTriggerEventSuccessPayload( + response_content=resp.response_content, + params_input=resp.params_input, + ) ) return await asyncio.sleep(self.poke_interval) diff --git a/task-sdk/src/airflow/sdk/api/client.py b/task-sdk/src/airflow/sdk/api/client.py index 45d35bac0ff9e..0bce11b064393 100644 --- a/task-sdk/src/airflow/sdk/api/client.py +++ b/task-sdk/src/airflow/sdk/api/client.py @@ -665,8 +665,8 @@ def 
update_response( self, *, ti_id: uuid.UUID, - response_content: str, - params_input: dict[str, Any] | None = None, + response_content: list[str], + params_input: dict[str, Any], ) -> HITLResponseContentDetail: """Update an existing Human-in-the-loop response.""" payload = UpdateHITLResponse( diff --git a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py index a515fe65cd8fa..cebdfec87ee84 100644 --- a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py +++ b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py @@ -191,9 +191,9 @@ class HITLResponseContentDetail(BaseModel): """ response_received: Annotated[bool, Field(title="Response Received")] - response_at: Annotated[AwareDatetime | None, Field(title="Response At")] = None user_id: Annotated[str | None, Field(title="User Id")] = None - response_content: Annotated[str | None, Field(title="Response Content")] = None + response_at: Annotated[AwareDatetime | None, Field(title="Response At")] = None + response_content: Annotated[list[str] | None, Field(title="Response Content")] = None params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None @@ -374,7 +374,7 @@ class UpdateHITLResponse(BaseModel): """ ti_id: Annotated[UUID, Field(title="Ti Id")] - response_content: Annotated[str, Field(title="Response Content")] + response_content: Annotated[list[str], Field(title="Response Content")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None type: Annotated[Literal["UpdateHITLResponse"] | None, Field(title="Type")] = "UpdateHITLResponse" diff --git a/task-sdk/src/airflow/sdk/execution_time/hitl.py b/task-sdk/src/airflow/sdk/execution_time/hitl.py index 9fb2184f29cfd..da46da43a9756 100644 --- a/task-sdk/src/airflow/sdk/execution_time/hitl.py +++ b/task-sdk/src/airflow/sdk/execution_time/hitl.py @@ -17,7 +17,6 @@ from __future__ import annotations -from collections.abc import MutableMapping from typing import 
TYPE_CHECKING, Any from uuid import UUID @@ -57,8 +56,8 @@ def add_hitl_response( def update_htil_response_content_detail( ti_id: UUID, - response_content: str, - params_input: MutableMapping | None = None, + response_content: list[str], + params_input: dict[str, Any], ) -> HITLResponseContentDetail: from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS diff --git a/task-sdk/tests/task_sdk/api/test_client.py b/task-sdk/tests/task_sdk/api/test_client.py index 7fa17ba1c4d38..1aa55f9c5e2ef 100644 --- a/task-sdk/tests/task_sdk/api/test_client.py +++ b/task-sdk/tests/task_sdk/api/test_client.py @@ -19,12 +19,15 @@ import json import pickle +from datetime import datetime +from typing import TYPE_CHECKING from unittest import mock import httpx import pytest import uuid6 from task_sdk import make_client, make_client_w_dry_run, make_client_w_responses +from uuid6 import uuid7 from airflow.sdk.api.client import RemoteValidationError, ServerResponseError from airflow.sdk.api.datamodels._generated import ( @@ -33,6 +36,7 @@ ConnectionResponse, DagRunState, DagRunStateResponse, + HITLResponseContentDetail, VariableResponse, XComResponse, ) @@ -40,6 +44,7 @@ from airflow.sdk.execution_time.comms import ( DeferTask, ErrorResponse, + HITLInputRequestResponseResult, OKResponse, RescheduleTask, TaskRescheduleStartDate, @@ -47,6 +52,9 @@ from airflow.utils import timezone from airflow.utils.state import TerminalTIState +if TYPE_CHECKING: + from time_machine import TimeMachineFixture + class TestClient: @pytest.mark.parametrize( @@ -1150,3 +1158,101 @@ def handle_request(request: httpx.Request) -> httpx.Response: assert isinstance(result, TaskRescheduleStartDate) assert result.start_date == "2024-01-01T00:00:00Z" + + +class TestHITLOperations: + def test_add_response(self) -> None: + ti_id = uuid7() + + def handle_request(request: httpx.Request) -> httpx.Response: + if request.url.path in (f"/hitl-responses/{ti_id}"): + return httpx.Response( + status_code=201, + json={ 
+ "ti_id": str(ti_id), + "options": ["Approval", "Reject"], + "subject": "This is subject", + "body": "This is body", + "default": ["Approval"], + "params": None, + "multiple": False, + }, + ) + return httpx.Response(status_code=400, json={"detail": "Bad Request"}) + + client = make_client(transport=httpx.MockTransport(handle_request)) + result = client.hitl.add_response( + ti_id=ti_id, + options=["Approval", "Reject"], + subject="This is subject", + body="This is body", + default=["Approval"], + params=None, + multiple=False, + ) + assert isinstance(result, HITLInputRequestResponseResult) + assert result.ti_id == ti_id + assert result.options == ["Approval", "Reject"] + assert result.subject == "This is subject" + assert result.body == "This is body" + assert result.default == ["Approval"] + assert result.params is None + assert result.multiple is False + + def test_update_response(self, time_machine: TimeMachineFixture) -> None: + time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) + ti_id = uuid7() + + def handle_request(request: httpx.Request) -> httpx.Response: + if request.url.path in (f"/hitl-responses/{ti_id}"): + return httpx.Response( + status_code=200, + json={ + "response_content": ["Approval"], + "params_input": {}, + "user_id": "admin", + "response_received": True, + "response_at": "2025-07-03T00:00:00Z", + }, + ) + return httpx.Response(status_code=400, json={"detail": "Bad Request"}) + + client = make_client(transport=httpx.MockTransport(handle_request)) + result = client.hitl.update_response( + ti_id=ti_id, + response_content=["Approve"], + params_input={}, + ) + assert isinstance(result, HITLResponseContentDetail) + assert result.response_received is True + assert result.response_content == ["Approval"] + assert result.params_input == {} + assert result.user_id == "admin" + assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) + + def test_get_response_content_detail(self, time_machine: TimeMachineFixture) -> None: + 
time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) + ti_id = uuid7() + + def handle_request(request: httpx.Request) -> httpx.Response: + if request.url.path in (f"/hitl-responses/{ti_id}"): + return httpx.Response( + status_code=200, + json={ + "response_content": ["Approval"], + "params_input": {}, + "user_id": "admin", + "response_received": True, + "response_at": "2025-07-03T00:00:00Z", + }, + ) + return httpx.Response(status_code=400, json={"detail": "Bad Request"}) + + client = make_client(transport=httpx.MockTransport(handle_request)) + result = client.hitl.get_response_content_detail(ti_id=ti_id) + assert isinstance(result, HITLResponseContentDetail) + assert result.response_received is True + assert result.response_content == ["Approval"] + assert result.params_input == {} + assert result.user_id == "admin" + assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) From c75afe49a6ae5e8b8a46bbe46cb4991c814ebced Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 00:00:54 +0800 Subject: [PATCH 11/30] test(hitl): improve supervisor.handle_request --- .../execution_time/test_supervisor.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/task-sdk/tests/task_sdk/execution_time/test_supervisor.py b/task-sdk/tests/task_sdk/execution_time/test_supervisor.py index 67f565ff9f4c1..8a045f8a701f8 100644 --- a/task-sdk/tests/task_sdk/execution_time/test_supervisor.py +++ b/task-sdk/tests/task_sdk/execution_time/test_supervisor.py @@ -60,6 +60,7 @@ AssetResult, CommsDecoder, ConnectionResult, + CreateHITLResponsePayload, DagRunStateResult, DeferTask, DeleteVariable, @@ -81,6 +82,7 @@ GetXCom, GetXComSequenceItem, GetXComSequenceSlice, + HITLInputRequestResponseResult, InactiveAssetsResult, OKResponse, PrevSuccessfulDagRunResult, @@ -1770,6 +1772,49 @@ def watched_subprocess(self, mocker): None, id="get_xcom_seq_slice", ), + pytest.param( + CreateHITLResponsePayload( + ti_id=TI_ID, + options=["Approve", "Reject"], + subject="This 
is subject", + body="This is body", + default=["Approve"], + multiple=False, + params={}, + ), + { + "ti_id": str(TI_ID), + "options": ["Approve", "Reject"], + "subject": "This is subject", + "body": "This is body", + "default": ["Approve"], + "multiple": False, + "params": {}, + "type": "HITLInputRequestResponseResult", + }, + "hitl.add_response", + (), + { + "body": "This is body", + "default": ["Approve"], + "multiple": False, + "options": ["Approve", "Reject"], + "params": {}, + "subject": "This is subject", + "ti_id": TI_ID, + }, + HITLInputRequestResponseResult( + ti_id=TI_ID, + options=["Approve", "Reject"], + subject="This is subject", + body="This is body", + default=["Approve"], + multiple=False, + params={}, + ), + None, + id="create_hitl_response_payload", + ), ], ) def test_handle_requests( From 2d57bceda4a3f55e5c85cb77f986819eb85451c0 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 00:34:00 +0800 Subject: [PATCH 12/30] test(hitl): add execution_time test cases --- .../task_sdk/execution_time/test_hitl.py | 95 +++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 task-sdk/tests/task_sdk/execution_time/test_hitl.py diff --git a/task-sdk/tests/task_sdk/execution_time/test_hitl.py b/task-sdk/tests/task_sdk/execution_time/test_hitl.py new file mode 100644 index 0000000000000..2ff161969912f --- /dev/null +++ b/task-sdk/tests/task_sdk/execution_time/test_hitl.py @@ -0,0 +1,95 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from uuid6 import uuid7 + +from airflow.sdk.api.datamodels._generated import HITLResponseContentDetail +from airflow.sdk.execution_time.comms import CreateHITLResponsePayload +from airflow.sdk.execution_time.hitl import ( + add_hitl_response, + get_hitl_response_content_detail, + update_htil_response_content_detail, +) +from airflow.utils import timezone + +TI_ID = uuid7() + + +def test_add_hitl_response(mock_supervisor_comms) -> None: + add_hitl_response( + ti_id=TI_ID, + options=["Approve", "Reject"], + subject="Subject", + body="Optional body", + default=["Approve", "Reject"], + params={"input_1": 1}, + multiple=False, + ) + mock_supervisor_comms.send.assert_called_with( + msg=CreateHITLResponsePayload( + ti_id=TI_ID, + options=["Approve", "Reject"], + subject="Subject", + body="Optional body", + default=["Approve", "Reject"], + params={"input_1": 1}, + multiple=False, + ) + ) + + +def test_update_htil_response_content_detail(mock_supervisor_comms) -> None: + timestamp = timezone.utcnow() + mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + response_received=True, + response_content=["Approve"], + response_at=timestamp, + user_id="admin", + params_input={"input_1": 1}, + ) + resp = update_htil_response_content_detail( + ti_id=TI_ID, + response_content=["Approve"], + params_input={"input_1": 1}, + ) + assert resp == HITLResponseContentDetail( + response_received=True, + response_content=["Approve"], + response_at=timestamp, + user_id="admin", + params_input={"input_1": 1}, + ) + + +def 
test_get_hitl_response_content_detail(mock_supervisor_comms) -> None: + mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + response_received=False, + response_content=None, + response_at=None, + user_id=None, + params_input={}, + ) + resp = get_hitl_response_content_detail(TI_ID) + assert resp == HITLResponseContentDetail( + response_received=False, + response_content=None, + response_at=None, + user_id=None, + params_input={}, + ) From c70d69dd638d460d38c4500bd47c803352f25d03 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 17:09:21 +0800 Subject: [PATCH 13/30] test(hitl): add test cases to public api --- .../api_fastapi/core_api/datamodels/hitl.py | 10 +- .../core_api/routes/public/hitl.py | 7 +- .../core_api/routes/public/test_hitl.py | 202 ++++++++++++++++++ 3 files changed, 209 insertions(+), 10 deletions(-) create mode 100644 airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py index aba82ec6ec73f..53ed05927b441 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -18,10 +18,12 @@ from collections.abc import Mapping from datetime import datetime +from typing import Any from pydantic import Field, field_validator from airflow.api_fastapi.core_api.base import BaseModel +from airflow.sdk import Param class UpdateHITLResponsePayload(BaseModel): @@ -51,21 +53,21 @@ class HITLResponseDetail(BaseModel): body: str | None = None default: list[str] | None = None multiple: bool = False - params: Mapping = Field(default_factory=dict) + params: dict[str, Any] = Field(default_factory=dict) # Response Content Detail user_id: str | None = None response_at: datetime | None = None response_content: list[str] | None = None - params_input: Mapping = Field(default_factory=dict) + 
params_input: dict[str, Any] = Field(default_factory=dict) response_received: bool = False @field_validator("params", mode="before") @classmethod - def get_params(cls, params: Mapping) -> Mapping: + def get_params(cls, params: dict[str, Any]) -> dict[str, Any]: """Convert params attribute to dict representation.""" - return {k: v.dump() for k, v in params.items()} + return {k: v.dump() if isinstance(v, Param) else v for k, v in params.items()} class HITLResponseDetailCollection(BaseModel): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py index fc01e29e15518..603ec7e01bf4a 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -90,12 +90,7 @@ def update_hitl_response( @hitl_router.get( "/{task_instance_id}", status_code=status.HTTP_200_OK, - responses=create_openapi_http_exception_doc( - [ - status.HTTP_404_NOT_FOUND, - status.HTTP_409_CONFLICT, - ] - ), + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), dependencies=[ Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), ], diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py new file mode 100644 index 0000000000000..f02f32d2def57 --- /dev/null +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -0,0 +1,202 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import pytest + +from tests_common.test_utils.db import AIRFLOW_V_3_1_PLUS + +if not AIRFLOW_V_3_1_PLUS: + pytest.skip("Human in the loop public API compatible with Airflow >= 3.0.1", allow_module_level=True) + +from datetime import datetime +from typing import TYPE_CHECKING, Any + +import time_machine +from uuid6 import uuid7 + +from airflow.models.hitl import HITLResponseModel + +if TYPE_CHECKING: + from airflow.models.taskinstance import TaskInstance + + +pytestmark = pytest.mark.db_test +TI_ID = uuid7() + + +@pytest.fixture +def sample_ti(create_task_instance) -> TaskInstance: + return create_task_instance() + + +@pytest.fixture +def sample_hitl_response(session, sample_ti) -> HITLResponseModel: + hitl_response_model = HITLResponseModel( + ti_id=sample_ti.id, + options=["Approve", "Reject"], + subject="This is subject", + body="this is body", + default=["Approve"], + multiple=False, + params={"input_1": 1}, + ) + session.add(hitl_response_model) + session.commit() + + return hitl_response_model + + +@pytest.fixture +def expected_sample_hitl_response_dict(sample_ti) -> dict[str, Any]: + return { + "body": "this is body", + "default": ["Approve"], + "multiple": False, + "options": ["Approve", "Reject"], + "params": {"input_1": 1}, + "params_input": {}, + "response_at": None, + "response_content": None, + "response_received": False, + "subject": "This is subject", + "ti_id": sample_ti.id, + "user_id": None, + } + + +class TestUpdateHITLResponseEndpoint: + @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) 
+ @pytest.mark.usefixtures("sample_hitl_response") + def test_should_respond_200_with_existing_response(self, test_client, sample_ti): + response = test_client.patch( + f"/hitl-responses/{sample_ti.id}", + json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, + ) + + assert response.status_code == 200 + assert response.json() == { + "params_input": {"input_1": 2}, + "response_content": ["Approve"], + "user_id": "test", + "response_at": "2025-07-03T00:00:00Z", + } + + def test_should_respond_404(self, test_client, sample_ti): + response = test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 404 + assert response.json() == { + "detail": { + "message": "Human-in-the-loop response not found", + "reason": "not_found", + }, + } + + @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) + @pytest.mark.usefixtures("sample_hitl_response") + def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_response_dict): + response = test_client.patch( + f"/hitl-responses/{sample_ti.id}", + json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, + ) + + expected_response = { + "params_input": {"input_1": 2}, + "response_content": ["Approve"], + "user_id": "test", + "response_at": "2025-07-03T00:00:00Z", + } + assert response.status_code == 200 + assert response.json() == expected_response + + response = test_client.patch( + f"/hitl-responses/{sample_ti.id}", + json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, + ) + assert response.status_code == 409 + assert response.json() == { + "detail": ( + "Human-in-the-loop Response has already been updated for Task Instance " + f"with id {sample_ti.id} " + "and is not allowed to write again." 
+ ) + } + + def test_should_respond_401(self, unauthenticated_test_client, sample_ti): + response = unauthenticated_test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 401 + + def test_should_respond_403(self, unauthorized_test_client, sample_ti): + response = unauthorized_test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 403 + + +class TestGetHITLResponseEndpoint: + @pytest.mark.usefixtures("sample_hitl_response") + def test_should_respond_200_with_existing_response( + self, test_client, sample_ti, expected_sample_hitl_response_dict + ): + response = test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 200 + assert response.json() == expected_sample_hitl_response_dict + + def test_should_respond_404(self, test_client, sample_ti): + response = test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 404 + assert response.json() == { + "detail": { + "message": "Human-in-the-loop response not found", + "reason": "not_found", + }, + } + + def test_should_respond_401(self, unauthenticated_test_client, sample_ti): + response = unauthenticated_test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 401 + + def test_should_respond_403(self, unauthorized_test_client, sample_ti): + response = unauthorized_test_client.get(f"/hitl-responses/{sample_ti.id}") + assert response.status_code == 403 + + +class TestGetHITLResponsesEndpoint: + @pytest.mark.usefixtures("sample_hitl_response") + def test_should_respond_200_with_existing_response( + self, test_client, sample_ti, expected_sample_hitl_response_dict + ): + response = test_client.get("/hitl-responses/") + assert response.status_code == 200 + assert response.json() == { + "hitl_responses": [expected_sample_hitl_response_dict], + "total_entries": 1, + } + + def test_should_respond_200_without_response(self, test_client): + response = test_client.get("/hitl-responses/") + 
assert response.status_code == 200 + assert response.json() == { + "hitl_responses": [], + "total_entries": 0, + } + + def test_should_respond_401(self, unauthenticated_test_client): + response = unauthenticated_test_client.get("/hitl-responses/") + assert response.status_code == 401 + + def test_should_respond_403(self, unauthorized_test_client): + response = unauthorized_test_client.get("/hitl-responses/") + assert response.status_code == 403 From 21cd2fb0d66f77549acca595a06df5ad1f3a544d Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 17:24:18 +0800 Subject: [PATCH 14/30] refactor(hitl): replace ImportError with AirflowOptionalProviderFeatureException --- .../standard/src/airflow/providers/standard/operators/hitl.py | 3 ++- .../standard/src/airflow/providers/standard/triggers/hitl.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 28e78638d6830..59c35f44db693 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -16,10 +16,11 @@ # under the License. 
from __future__ import annotations +from airflow.exceptions import AirflowOptionalProviderFeatureException from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS if not AIRFLOW_V_3_1_PLUS: - raise ImportError("Human in the loop functionality needs Airflow 3.1+.") + raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.") from collections.abc import Collection, Mapping diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py index f61a9e4960285..fe04b9614e7cb 100644 --- a/providers/standard/src/airflow/providers/standard/triggers/hitl.py +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -16,10 +16,11 @@ # under the License. from __future__ import annotations +from airflow.exceptions import AirflowOptionalProviderFeatureException from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS if not AIRFLOW_V_3_1_PLUS: - raise ImportError("Human in the loop functionality needs Airflow 3.1+.") + raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.") import asyncio from collections.abc import AsyncIterator From 2a758f299fc5ca01c1b06f370d105c9055121de4 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 17:35:57 +0800 Subject: [PATCH 15/30] refactor(hitl): remove unnecessay directories --- .../providers/standard/api_fastapi/__init__.py | 16 ---------------- .../standard/api_fastapi/core_api/__init__.py | 16 ---------------- .../api_fastapi/core_api/datamodels/__init__.py | 16 ---------------- 3 files changed, 48 deletions(-) delete mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py delete mode 100644 providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py 
diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py b/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/standard/src/airflow/providers/standard/api_fastapi/core_api/datamodels/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
From 8c2a59faf491e46f10c4a69ad9de7904f5c4db10 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 18:51:18 +0800 Subject: [PATCH 16/30] test(hitl): add test cases to execution api --- .../execution_api/versions/head/test_hitl.py | 140 ++++++++++++++++++ 1 file changed, 140 insertions(+) create mode 100644 airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py diff --git a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py new file mode 100644 index 0000000000000..d4fcba9879cd1 --- /dev/null +++ b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py @@ -0,0 +1,140 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime + +import pytest +import time_machine +from uuid6 import uuid7 + +from tests_common.test_utils.db import AIRFLOW_V_3_1_PLUS + +if not AIRFLOW_V_3_1_PLUS: + pytest.skip("Human in the loop public API compatible with Airflow >= 3.0.1", allow_module_level=True) + +from typing import TYPE_CHECKING, Any + +from airflow.models.hitl import HITLResponseModel + +if TYPE_CHECKING: + from airflow.models.taskinstance import TaskInstance + +pytestmark = pytest.mark.db_test +TI_ID = uuid7() + + +@pytest.fixture +def sample_ti(create_task_instance) -> TaskInstance: + return create_task_instance() + + +@pytest.fixture +def sample_hitl_response(session, sample_ti) -> HITLResponseModel: + hitl_response_model = HITLResponseModel( + ti_id=sample_ti.id, + options=["Approve", "Reject"], + subject="This is subject", + body="this is body", + default=["Approve"], + multiple=False, + params={"input_1": 1}, + ) + session.add(hitl_response_model) + session.commit() + + return hitl_response_model + + +@pytest.fixture +def expected_sample_hitl_response_dict(sample_ti) -> dict[str, Any]: + return { + "body": "this is body", + "default": ["Approve"], + "multiple": False, + "options": ["Approve", "Reject"], + "params": {"input_1": 1}, + "params_input": {}, + "response_at": None, + "response_content": None, + "response_received": False, + "subject": "This is subject", + "ti_id": sample_ti.id, + "user_id": None, + } + + +def test_add_hitl_response(client, create_task_instance, session) -> None: + ti = create_task_instance() + session.commit() + + response = client.post( + f"/execution/hitl-responses/{ti.id}", + json={ + "ti_id": ti.id, + "options": ["Approve", "Reject"], + "subject": "This is subject", + "body": "this is body", + "default": ["Approve"], + "multiple": False, + "params": {"input_1": 1}, + }, + ) + assert response.status_code == 201 + assert response.json() == { + "ti_id": ti.id, + "options": ["Approve", "Reject"], + 
"subject": "This is subject", + "body": "this is body", + "default": ["Approve"], + "multiple": False, + "params": {"input_1": 1}, + } + + +@time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) +@pytest.mark.usefixtures("sample_hitl_response") +def test_update_hitl_response(client, sample_ti) -> None: + response = client.patch( + f"/execution/hitl-responses/{sample_ti.id}", + json={ + "ti_id": sample_ti.id, + "response_content": ["Reject"], + "params_input": {"input_1": 2}, + }, + ) + assert response.status_code == 200 + assert response.json() == { + "params_input": {"input_1": 2}, + "response_at": "2025-07-03T00:00:00Z", + "response_content": ["Reject"], + "response_received": True, + "user_id": "Fallback to default", + } + + +@pytest.mark.usefixtures("sample_hitl_response") +def test_get_hitl_response(client, sample_ti) -> None: + response = client.get(f"/execution/hitl-responses/{sample_ti.id}") + assert response.status_code == 200 + assert response.json() == { + "params_input": {}, + "response_at": None, + "response_content": None, + "response_received": False, + "user_id": None, + } From 4e5696ac58dc001d2be25315342d0d777bb13d5b Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 19:43:47 +0800 Subject: [PATCH 17/30] test(hitl): add test cases to operators --- .../providers/standard/operators/hitl.py | 6 +- .../unit/standard/operators/test_hitl.py | 203 ++++++++++++++++++ 2 files changed, 205 insertions(+), 4 deletions(-) diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 59c35f44db693..890d1473a6da5 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -165,8 +165,7 @@ class ApprovalOperator(HITLOperator): def __init__(self, **kwargs) -> None: if "options" in kwargs: - kwargs.pop("options") - self.log.warning("Passing options into 
ApprovalOperator will be ignored.") + raise ValueError("Passing options to ApprovalOperator is not allowed.") super().__init__(options=["Approve", "Reject"], **kwargs) @@ -179,8 +178,7 @@ class HITLTerminationOperator(HITLOperator, SkipMixin): def __init__(self, **kwargs) -> None: if "options" in kwargs: - kwargs.pop("options") - self.log.warning("Passing options into ApprovalOperator will be ignored.") + raise ValueError("Passing options to HITLTerminationOperator is not allowed.") super().__init__(options=["Stop", "Proceed"], **kwargs) def execute_complete(self, context: Context, event: dict[str, Any]) -> None: diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index 13a83393a9124..a9d565bacf793 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -14,3 +14,206 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+from __future__ import annotations + +import pytest +from sqlalchemy import select + +from airflow.exceptions import DownstreamTasksSkipped +from airflow.models import Trigger +from airflow.models.hitl import HITLResponseModel +from airflow.providers.standard.operators.hitl import ( + ApprovalOperator, + HITLEntryOperator, + HITLOperator, + HITLTerminationOperator, +) +from airflow.providers.standard.triggers.hitl import HITLTriggerEventSuccessPayload +from airflow.sdk import Param +from airflow.sdk.definitions.param import ParamsDict + +pytestmark = pytest.mark.db_test + + +class TestHITLOperator: + def test_validate_defaults(self) -> None: + hitl_op = HITLOperator( + task_id="hitl_test", + subject="This is subject", + options=["1", "2", "3", "4", "5"], + body="This is body", + default=["1"], + multiple=False, + params=ParamsDict({"input_1": 1}), + ) + hitl_op.validate_defaults() + + @pytest.mark.parametrize( + "extra_kwargs", + [ + {"default": None, "execution_timeout": 10}, + {"default": ["0"]}, + {"multiple": False, "default": ["1", "2"]}, + ], + ids=["timeout with no default", "default not in option", "multiple default when multiple is False"], + ) + def test_validate_defaults_with_invalid_defaults(self, extra_kwargs) -> None: + with pytest.raises(ValueError): + HITLOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + options=["1", "2", "3", "4", "5"], + params=ParamsDict({"input_1": 1}), + **extra_kwargs, + ) + + def test_execute(self, dag_maker, session) -> None: + with dag_maker("test_dag"): + task = HITLOperator( + task_id="hitl_test", + subject="This is subject", + options=["1", "2", "3", "4", "5"], + body="This is body", + default=["1"], + multiple=False, + params=ParamsDict({"input_1": 1}), + ) + dr = dag_maker.create_dagrun() + ti = dag_maker.run_ti(task.task_id, dr) + + hitl_response_model = session.scalar( + select(HITLResponseModel).where(HITLResponseModel.ti_id == ti.id) + ) + assert hitl_response_model.ti_id == 
ti.id + assert hitl_response_model.subject == "This is subject" + assert hitl_response_model.options == ["1", "2", "3", "4", "5"] + assert hitl_response_model.body == "This is body" + assert hitl_response_model.default == ["1"] + assert hitl_response_model.multiple is False + assert hitl_response_model.params == {"input_1": 1} + assert hitl_response_model.response_at is None + assert hitl_response_model.user_id is None + assert hitl_response_model.response_content is None + assert hitl_response_model.params_input == {} + + registered_trigger = session.scalar( + select(Trigger).where(Trigger.classpath == "airflow.providers.standard.triggers.hitl.HITLTrigger") + ) + assert registered_trigger.kwargs == { + "ti_id": ti.id, + "options": ["1", "2", "3", "4", "5"], + "default": ["1"], + "params": {"input_1": 1}, + "multiple": False, + "timeout_datetime": None, + "poke_interval": 5.0, + } + + @pytest.mark.parametrize( + "input_params, expected_params", + [ + (ParamsDict({"input": 1}), {"input": 1}), + ({"input": Param(5, type="integer", minimum=3)}, {"input": 5}), + (None, {}), + ], + ) + def test_serialzed_params(self, input_params, expected_params) -> None: + hitl_op = HITLOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + options=["1", "2", "3", "4", "5"], + params=input_params, + ) + assert hitl_op.serialzed_params == expected_params + + def test_execute_complete(self) -> None: + hitl_op = HITLOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + ) + + ret = hitl_op.execute_complete( + context={}, + event=HITLTriggerEventSuccessPayload(response_content=["1"], params_input={"input": 2}), + ) + + assert ret["response_content"] == ["1"] + assert ret["params_input"] == {"input": 2} + + def test_validate_response_content_with_invalid_content(self) -> None: + hitl_op = HITLOperator( + task_id="hitl_test", + subject="This is subject", + body="This 
is body", + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + ) + + with pytest.raises(ValueError): + hitl_op.execute_complete( + context={}, + event=HITLTriggerEventSuccessPayload( + response_content=["not exists"], + params_input={"input": 2}, + ), + ) + + def test_validate_params_input_with_invalid_input(self) -> None: + hitl_op = HITLOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + ) + + with pytest.raises(ValueError): + hitl_op.execute_complete( + context={}, + event=HITLTriggerEventSuccessPayload( + response_content=["1"], + params_input={"no such key": 2, "input": 333}, + ), + ) + + +class TestApprovalOperator: + def test_init(self): + with pytest.raises(ValueError): + ApprovalOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + ) + + +class TestHITLTerminationOperator: + def test_init(self): + with pytest.raises(ValueError): + HITLTerminationOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + ) + + +class TestHITLEntryOperator: + def test_init(self): + op = HITLEntryOperator( + task_id="hitl_test", + subject="This is subject", + body="This is body", + params={"input": 1}, + ) + + assert op.options == ["OK"] + assert op.default == ["OK"] From 63f850cc3fc96eeb5bc94d8a3b8684052fce7db3 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 22:03:49 +0800 Subject: [PATCH 18/30] style: fix mypy warning --- .../standard/src/airflow/providers/standard/operators/hitl.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 890d1473a6da5..12ba5b1413214 100644 --- 
a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -63,7 +63,7 @@ def __init__( body: str | None = None, default: str | list[str] | None = None, multiple: bool = False, - params: ParamsDict | None = None, + params: ParamsDict | dict[str, Any] | None = None, **kwargs, ) -> None: super().__init__(**kwargs) From b06e18142769dc78f0f23399d2155be7f6bb782b Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Jul 2025 22:59:28 +0800 Subject: [PATCH 19/30] ci: fix ci errors --- airflow-core/docs/img/airflow_erd.sha256 | 2 +- airflow-core/docs/img/airflow_erd.svg | 3381 +++++++++-------- .../openapi/v2-rest-api-generated.yaml | 6 - .../unit/standard/operators/test_hitl.py | 25 +- .../tests/unit/standard/triggers/test_hitl.py | 2 +- 5 files changed, 1735 insertions(+), 1681 deletions(-) diff --git a/airflow-core/docs/img/airflow_erd.sha256 b/airflow-core/docs/img/airflow_erd.sha256 index a5bf22a4f9d99..4a770256463ab 100644 --- a/airflow-core/docs/img/airflow_erd.sha256 +++ b/airflow-core/docs/img/airflow_erd.sha256 @@ -1 +1 @@ -7ac92245e16093fd93a67d14653f14d125867fe6c5e3f0ad94c44fb572e6facf \ No newline at end of file +98caac0163253608fb8aa38c3fe5185070c39f3af3c0887bb433c597cb0875fe \ No newline at end of file diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg index 5565970e5573f..0b725621a54f4 100644 --- a/airflow-core/docs/img/airflow_erd.svg +++ b/airflow-core/docs/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + dag_priority_parsing_request @@ -247,2027 +247,2082 @@ [INTEGER] NOT NULL - + +hitl_response + +hitl_response + +ti_id + + [UUID] + NOT NULL + +body + + [TEXT] + +default + + [JSON] + +multiple + + [BOOLEAN] + +options + + [JSON] + NOT NULL + +params + + [JSON] + NOT NULL + +params_input + + [JSON] + NOT NULL + +response_at + + [TIMESTAMP] + +response_content + + [JSON] + +subject + + [TEXT] + NOT NULL + +user_id + + 
[VARCHAR(128)] + + + slot_pool - -slot_pool - -id - - [INTEGER] - NOT NULL - -description - - [TEXT] - -include_deferred - - [BOOLEAN] - NOT NULL - -pool - - [VARCHAR(256)] - -slots - - [INTEGER] + +slot_pool + +id + + [INTEGER] + NOT NULL + +description + + [TEXT] + +include_deferred + + [BOOLEAN] + NOT NULL + +pool + + [VARCHAR(256)] + +slots + + [INTEGER] - + import_error - -import_error - -id - - [INTEGER] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -filename - - [VARCHAR(1024)] - -stacktrace - - [TEXT] - -timestamp - - [TIMESTAMP] + +import_error + +id + + [INTEGER] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +filename + + [VARCHAR(1024)] + +stacktrace + + [TEXT] + +timestamp + + [TIMESTAMP] - + asset_alias - -asset_alias - -id - - [INTEGER] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL + +asset_alias + +id + + [INTEGER] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL - + asset_alias_asset - -asset_alias_asset - -alias_id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL + +asset_alias_asset + +alias_id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_alias_asset_event - -asset_alias_asset_event - -alias_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +asset_alias_asset_event + +alias_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dag_schedule_asset_alias_reference - -dag_schedule_asset_alias_reference - -alias_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_alias_reference + +alias_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL 
asset_alias--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 - + asset - -asset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_trigger - -asset_trigger - -asset_id - - [INTEGER] - NOT NULL - -trigger_id - - [INTEGER] - NOT NULL + +asset_trigger + +asset_id + + [INTEGER] + NOT NULL + +trigger_id + + [INTEGER] + NOT NULL asset--asset_trigger - -0..N -1 + +0..N +1 - + asset_active - -asset_active - -name - - [VARCHAR(1500)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset_active + +name + + [VARCHAR(1500)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_active - -1 -1 + +1 +1 asset--asset_active - -1 -1 + +1 +1 - + dag_schedule_asset_reference - -dag_schedule_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--dag_schedule_asset_reference - -0..N -1 + +0..N +1 - + task_outlet_asset_reference - -task_outlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_outlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + 
+dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_outlet_asset_reference - -0..N -1 + +0..N +1 - + task_inlet_asset_reference - -task_inlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_inlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_inlet_asset_reference - -0..N -1 + +0..N +1 - + asset_dag_run_queue - -asset_dag_run_queue - -asset_id - - [INTEGER] - NOT NULL - -target_dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +asset_dag_run_queue + +asset_id + + [INTEGER] + NOT NULL + +target_dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL asset--asset_dag_run_queue - -0..N -1 + +0..N +1 - + asset_event - -asset_event - -id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +asset_event + +id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL asset_event--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dagrun_asset_event - -dagrun_asset_event - -dag_run_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +dagrun_asset_event + +dag_run_id + + [INTEGER] + NOT NULL + +event_id + 
+ [INTEGER] + NOT NULL asset_event--dagrun_asset_event - -0..N -1 + +0..N +1 - + trigger - -trigger - -id - - [INTEGER] - NOT NULL - -classpath - - [VARCHAR(1000)] - NOT NULL - -created_date - - [TIMESTAMP] - NOT NULL - -kwargs - - [TEXT] - NOT NULL - -triggerer_id - - [INTEGER] + +trigger + +id + + [INTEGER] + NOT NULL + +classpath + + [VARCHAR(1000)] + NOT NULL + +created_date + + [TIMESTAMP] + NOT NULL + +kwargs + + [TEXT] + NOT NULL + +triggerer_id + + [INTEGER] trigger--asset_trigger - -0..N -1 + +0..N +1 - + task_instance - -task_instance - -id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -last_heartbeat_at - - [TIMESTAMP] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance + +id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + 
NOT NULL + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +last_heartbeat_at + + [TIMESTAMP] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] trigger--task_instance - -0..N -{0,1} + +0..N +{0,1} - + task_map - -task_map - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -keys - - [JSONB] - -length - - [INTEGER] - NOT NULL + +task_map + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +keys + + [JSONB] + +length + + [INTEGER] + NOT NULL task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 - + task_reschedule - -task_reschedule - -id - - [INTEGER] - NOT NULL - -duration - - [INTEGER] - NOT NULL - -end_date - - [TIMESTAMP] - NOT NULL - -reschedule_date - - [TIMESTAMP] - NOT NULL - -start_date - - 
[TIMESTAMP] - NOT NULL - -ti_id - - [UUID] - NOT NULL + +task_reschedule + +id + + [INTEGER] + NOT NULL + +duration + + [INTEGER] + NOT NULL + +end_date + + [TIMESTAMP] + NOT NULL + +reschedule_date + + [TIMESTAMP] + NOT NULL + +start_date + + [TIMESTAMP] + NOT NULL + +ti_id + + [UUID] + NOT NULL task_instance--task_reschedule - -0..N -1 + +0..N +1 - + xcom - -xcom - -dag_run_id - - [INTEGER] - NOT NULL - -key - - [VARCHAR(512)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL - -value - - [JSONB] + +xcom + +dag_run_id + + [INTEGER] + NOT NULL + +key + + [VARCHAR(512)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL + +value + + [JSONB] task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 - + task_instance_note - -task_instance_note - -ti_id - - [UUID] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +task_instance_note + +ti_id + + [UUID] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] task_instance--task_instance_note - -1 -1 + +1 +1 - + task_instance_history - -task_instance_history - -task_instance_id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - 
[VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +task_instance_id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + 
+trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 - + rendered_task_instance_fields - -rendered_task_instance_fields - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -k8s_pod_yaml - - [JSON] - -rendered_fields - - [JSON] - NOT NULL + +rendered_task_instance_fields + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +k8s_pod_yaml + + [JSON] + +rendered_fields + + [JSON] + NOT NULL task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 - + dag_bundle - -dag_bundle - -name - - [VARCHAR(250)] - NOT NULL - -active - - [BOOLEAN] - -last_refreshed - - [TIMESTAMP] - -version - - [VARCHAR(200)] + +dag_bundle + +name + + [VARCHAR(250)] + NOT NULL + +active + + [BOOLEAN] + +last_refreshed + + [TIMESTAMP] + +version + + [VARCHAR(200)] - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -asset_expression - - [JSON] - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(200)] - -dag_display_name - - [VARCHAR(2000)] - -deadline - - [JSON] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] - NOT NULL - -is_paused - - [BOOLEAN] - -is_stale - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - 
[TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -relative_fileloc - - [VARCHAR(2000)] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +asset_expression + + [JSON] + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(200)] + +dag_display_name + + [VARCHAR(2000)] + +deadline + + [JSON] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_paused + + [BOOLEAN] + +is_stale + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +relative_fileloc + + [VARCHAR(2000)] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] dag_bundle--dag - -0..N -{0,1} + +0..N +{0,1} dag--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 dag--dag_schedule_asset_reference - -0..N -1 + +0..N +1 dag--task_outlet_asset_reference - -0..N -1 + +0..N +1 dag--task_inlet_asset_reference - -0..N -1 + +0..N +1 dag--asset_dag_run_queue - -0..N -1 + +0..N +1 - + dag_schedule_asset_name_reference - -dag_schedule_asset_name_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_name_reference + +dag_id + + [VARCHAR(250)] + NOT 
NULL + +name + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_name_reference - -0..N -1 + +0..N +1 - + dag_schedule_asset_uri_reference - -dag_schedule_asset_uri_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_uri_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_uri_reference - -0..N -1 + +0..N +1 - + dag_version - -dag_version - -id - - [UUID] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(250)] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -version_number - - [INTEGER] - NOT NULL + +dag_version + +id + + [UUID] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(250)] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +version_number + + [INTEGER] + NOT NULL dag--dag_version - -0..N -1 + +0..N +1 - + dag_tag - -dag_tag - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +dag_tag + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes - -dag_owner_attributes - -dag_id - - [VARCHAR(250)] - NOT NULL - -owner - - [VARCHAR(500)] - NOT NULL - -link - - [VARCHAR(500)] - NOT NULL + +dag_owner_attributes + +dag_id + + [VARCHAR(250)] + NOT NULL + +owner + + [VARCHAR(500)] + NOT NULL + +link + + [VARCHAR(500)] + NOT NULL dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning - -dag_warning - -dag_id - - [VARCHAR(250)] - NOT NULL - -warning_type - - [VARCHAR(50)] - NOT NULL - -message - - [TEXT] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL + +dag_warning + +dag_id + + [VARCHAR(250)] + NOT NULL + 
+warning_type + + [VARCHAR(50)] + NOT NULL + +message + + [TEXT] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL dag--dag_warning - -0..N -1 + +0..N +1 - + dag_favorite - -dag_favorite - -dag_id - - [VARCHAR(250)] - NOT NULL - -user_id - - [VARCHAR(250)] - NOT NULL + +dag_favorite + +dag_id + + [VARCHAR(250)] + NOT NULL + +user_id + + [VARCHAR(250)] + NOT NULL dag--dag_favorite - -0..N -1 + +0..N +1 - + deadline - -deadline - -id - - [UUID] - NOT NULL - -callback - - [VARCHAR(500)] - NOT NULL - -callback_kwargs - - [JSON] - -dag_id - - [VARCHAR(250)] - -dagrun_id - - [INTEGER] - -deadline_time - - [TIMESTAMP] - NOT NULL + +deadline + +id + + [UUID] + NOT NULL + +callback + + [VARCHAR(500)] + NOT NULL + +callback_kwargs + + [JSON] + +dag_id + + [VARCHAR(250)] + +dagrun_id + + [INTEGER] + +deadline_time + + [TIMESTAMP] + NOT NULL dag--deadline - -0..N -{0,1} + +0..N +{0,1} dag_version--task_instance - -0..N -1 + +0..N +1 - + dag_run - -dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - -bundle_version - - [VARCHAR(250)] - -clear_number - - [INTEGER] - NOT NULL - -conf - - [JSONB] - -context_carrier - - [JSONB] - -created_dag_version_id - - [UUID] - -creating_job_id - - [INTEGER] - -dag_id - - [VARCHAR(250)] - NOT NULL - -data_interval_end - - [TIMESTAMP] - -data_interval_start - - [TIMESTAMP] - -end_date - - [TIMESTAMP] - -last_scheduling_decision - - [TIMESTAMP] - -log_template_id - - [INTEGER] - -logical_date - - [TIMESTAMP] - -queued_at - - [TIMESTAMP] - -run_after - - [TIMESTAMP] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -run_type - - [VARCHAR(50)] - NOT NULL - -scheduled_by_job_id - - [INTEGER] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(50)] - -triggered_by - - [VARCHAR(50)] - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] + +dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + +bundle_version + + [VARCHAR(250)] + +clear_number + + 
[INTEGER] + NOT NULL + +conf + + [JSONB] + +context_carrier + + [JSONB] + +created_dag_version_id + + [UUID] + +creating_job_id + + [INTEGER] + +dag_id + + [VARCHAR(250)] + NOT NULL + +data_interval_end + + [TIMESTAMP] + +data_interval_start + + [TIMESTAMP] + +end_date + + [TIMESTAMP] + +last_scheduling_decision + + [TIMESTAMP] + +log_template_id + + [INTEGER] + +logical_date + + [TIMESTAMP] + +queued_at + + [TIMESTAMP] + +run_after + + [TIMESTAMP] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +run_type + + [VARCHAR(50)] + NOT NULL + +scheduled_by_job_id + + [INTEGER] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(50)] + +triggered_by + + [VARCHAR(50)] + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] dag_version--dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_code - -dag_code - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -fileloc - - [VARCHAR(2000)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -source_code - - [TEXT] - NOT NULL - -source_code_hash - - [VARCHAR(32)] - NOT NULL + +dag_code + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +fileloc + + [VARCHAR(2000)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +source_code + + [TEXT] + NOT NULL + +source_code_hash + + [VARCHAR(32)] + NOT NULL dag_version--dag_code - -0..N -1 + +0..N +1 - + serialized_dag - -serialized_dag - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_hash - - [VARCHAR(32)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -data - - [JSON] - -data_compressed - - [BYTEA] - -last_updated - - [TIMESTAMP] - NOT NULL + +serialized_dag + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_hash + + [VARCHAR(32)] 
+ NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +data + + [JSON] + +data_compressed + + [BYTEA] + +last_updated + + [TIMESTAMP] + NOT NULL dag_version--serialized_dag - -0..N -1 + +0..N +1 dag_run--dagrun_asset_event - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--deadline - -0..N -{0,1} + +0..N +{0,1} - + backfill_dag_run - -backfill_dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - NOT NULL - -dag_run_id - - [INTEGER] - -exception_reason - - [VARCHAR(250)] - -logical_date - - [TIMESTAMP] - NOT NULL - -sort_ordinal - - [INTEGER] - NOT NULL + +backfill_dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + NOT NULL + +dag_run_id + + [INTEGER] + +exception_reason + + [VARCHAR(250)] + +logical_date + + [TIMESTAMP] + NOT NULL + +sort_ordinal + + [INTEGER] + NOT NULL dag_run--backfill_dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_run_note - -dag_run_note - -dag_run_id - - [INTEGER] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +dag_run_note + +dag_run_id + + [INTEGER] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] dag_run--dag_run_note - -1 -1 + +1 +1 - + log_template - -log_template - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -elasticsearch_id - - [TEXT] - NOT NULL - -filename - - [TEXT] - NOT NULL + +log_template + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +elasticsearch_id + + [TEXT] + NOT NULL + +filename + + [TEXT] + NOT NULL log_template--dag_run - -0..N -{0,1} + +0..N +{0,1} - + backfill - -backfill - -id - - [INTEGER] - NOT NULL - -completed_at - - [TIMESTAMP] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL 
- -dag_run_conf - - [JSON] - NOT NULL - -from_date - - [TIMESTAMP] - NOT NULL - -is_paused - - [BOOLEAN] - -max_active_runs - - [INTEGER] - NOT NULL - -reprocess_behavior - - [VARCHAR(250)] - NOT NULL - -to_date - - [TIMESTAMP] - NOT NULL - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] - NOT NULL + +backfill + +id + + [INTEGER] + NOT NULL + +completed_at + + [TIMESTAMP] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_run_conf + + [JSON] + NOT NULL + +from_date + + [TIMESTAMP] + NOT NULL + +is_paused + + [BOOLEAN] + +max_active_runs + + [INTEGER] + NOT NULL + +reprocess_behavior + + [VARCHAR(250)] + NOT NULL + +to_date + + [TIMESTAMP] + NOT NULL + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] + NOT NULL backfill--dag_run - -0..N -{0,1} + +0..N +{0,1} backfill--backfill_dag_run - -0..N -1 + +0..N +1 - + alembic_version - -alembic_version - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index 8a062183f84ea..9209a307bacea 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -7223,12 +7223,6 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict '422': description: Validation Error content: diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index a9d565bacf793..fcc9486231531 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ 
b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -17,6 +17,12 @@ from __future__ import annotations import pytest + +from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_PLUS + +if not AIRFLOW_V_3_1_PLUS: + pytest.skip("Human in the loop public API compatible with Airflow >= 3.0.1", allow_module_level=True) + from sqlalchemy import select from airflow.exceptions import DownstreamTasksSkipped @@ -28,7 +34,6 @@ HITLOperator, HITLTerminationOperator, ) -from airflow.providers.standard.triggers.hitl import HITLTriggerEventSuccessPayload from airflow.sdk import Param from airflow.sdk.definitions.param import ParamsDict @@ -139,7 +144,7 @@ def test_execute_complete(self) -> None: ret = hitl_op.execute_complete( context={}, - event=HITLTriggerEventSuccessPayload(response_content=["1"], params_input={"input": 2}), + event={"response_content": ["1"], "params_input": {"input": 2}}, ) assert ret["response_content"] == ["1"] @@ -157,10 +162,10 @@ def test_validate_response_content_with_invalid_content(self) -> None: with pytest.raises(ValueError): hitl_op.execute_complete( context={}, - event=HITLTriggerEventSuccessPayload( - response_content=["not exists"], - params_input={"input": 2}, - ), + event={ + "response_content": ["not exists"], + "params_input": {"input": 2}, + }, ) def test_validate_params_input_with_invalid_input(self) -> None: @@ -175,10 +180,10 @@ def test_validate_params_input_with_invalid_input(self) -> None: with pytest.raises(ValueError): hitl_op.execute_complete( context={}, - event=HITLTriggerEventSuccessPayload( - response_content=["1"], - params_input={"no such key": 2, "input": 333}, - ), + event={ + "response_content": ["1"], + "params_input": {"no such key": 2, "input": 333}, + }, ) diff --git a/providers/standard/tests/unit/standard/triggers/test_hitl.py b/providers/standard/tests/unit/standard/triggers/test_hitl.py index 6a1edb46cd20f..cb9b58c87f905 100644 --- 
a/providers/standard/tests/unit/standard/triggers/test_hitl.py +++ b/providers/standard/tests/unit/standard/triggers/test_hitl.py @@ -19,7 +19,7 @@ import pytest -from tests_common.test_utils.db import AIRFLOW_V_3_1_PLUS +from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_PLUS if not AIRFLOW_V_3_1_PLUS: pytest.skip("Human in the loop public API compatible with Airflow >= 3.0.1", allow_module_level=True) From ed04ffad0afd7e656b89c0e8a6923a421ae77ac0 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 16:27:58 +0800 Subject: [PATCH 20/30] feat(hitl): add fk constraint --- .../0077_3_1_0_add_human_in_the_loop_response.py | 9 ++++++++- airflow-core/src/airflow/models/hitl.py | 12 +++++++++++- .../src/airflow/providers/standard/operators/hitl.py | 2 +- 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py index 7459d93b72b25..16c375691b05a 100644 --- a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -29,7 +29,7 @@ import sqlalchemy_jsonfield from alembic import op -from sqlalchemy import Boolean, Column, String, Text +from sqlalchemy import Boolean, Column, ForeignKeyConstraint, String, Text from sqlalchemy.dialects import postgresql from airflow.settings import json @@ -63,6 +63,13 @@ def upgrade(): Column("user_id", String(128), nullable=True), Column("response_content", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), + ForeignKeyConstraint( + ["ti_id"], + ["task_instance.id"], + name="hitl_response_ti_fkey", + ondelete="CASCADE", + onupdate="CASCADE", + ), ) diff --git a/airflow-core/src/airflow/models/hitl.py 
b/airflow-core/src/airflow/models/hitl.py index 77278170a11a2..cd310c8c3c18d 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -17,7 +17,7 @@ from __future__ import annotations import sqlalchemy_jsonfield -from sqlalchemy import Boolean, Column, String, Text +from sqlalchemy import Boolean, Column, ForeignKeyConstraint, String, Text from sqlalchemy.dialects import postgresql from sqlalchemy.ext.hybrid import hybrid_property @@ -54,6 +54,16 @@ class HITLResponseModel(Base): ) params_input = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) + __table_args__ = ( + ForeignKeyConstraint( + (ti_id,), + ["task_instance.id"], + name="hitl_response_ti_fkey", + ondelete="CASCADE", + onupdate="CASCADE", + ), + ) + @hybrid_property def response_received(self) -> bool: return self.response_at is not None diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 12ba5b1413214..5af47bf15bdc9 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -110,11 +110,11 @@ def execute(self, context: Context): multiple=self.multiple, params=self.serialzed_params, ) - self.log.info("Waiting for response") if self.execution_timeout: timeout_datetime = datetime.now(timezone.utc) + self.execution_timeout else: timeout_datetime = None + self.log.info("Waiting for response") # Defer the Human-in-the-loop response checking process to HITLTrigger self.defer( trigger=HITLTrigger( From fcd7c3db3166d8f3c872f02bec7e7495620c4c96 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 16:32:18 +0800 Subject: [PATCH 21/30] feat(hitl): set params as ParamsDict --- .../standard/src/airflow/providers/standard/operators/hitl.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 5af47bf15bdc9..adc0aa7b42129 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -75,7 +75,7 @@ def __init__( self.default = [default] if isinstance(default, str) else default self.multiple = multiple - self.params: ParamsDict | dict = params or {} + self.params: ParamsDict = params if isinstance(params, ParamsDict) else ParamsDict(params or {}) self.validate_defaults() From 8ef4cd9e387c915d8b8a7bc5df60277c7c697910 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 16:37:10 +0800 Subject: [PATCH 22/30] docs(hitl_operator): update body description --- .../standard/src/airflow/providers/standard/operators/hitl.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index adc0aa7b42129..647ddfc636960 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -44,8 +44,7 @@ class HITLOperator(BaseOperator): :param subject: Headline/subject presented to the user for the interaction task. :param options: List of options that the an user can select from to complete the task. - :param body: descriptive text that might give background, hints or can provide background or summary of - details that are needed to decide. + :param body: Descriptive text (with Markdown support) that gives the details that are needed to decide. :param default: The default option and the option that is taken if timeout is passed. :param multiple: Whether the user can select one or multiple options. 
:param params: dictionary of parameter definitions that are in the format of Dag params such that From a0b30f84fdadaca4dfcf8981f60cabe44692ee7b Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 17:19:12 +0800 Subject: [PATCH 23/30] feat(hitl): rename hitl-responses as hitl-details --- airflow-core/docs/img/airflow_erd.sha256 | 2 +- airflow-core/docs/img/airflow_erd.svg | 3447 +++++++++-------- airflow-core/docs/migrations-ref.rst | 2 +- .../api_fastapi/core_api/datamodels/hitl.py | 20 +- .../openapi/v2-rest-api-generated.yaml | 106 +- .../core_api/routes/public/hitl.py | 93 +- .../execution_api/datamodels/hitl.py | 27 +- .../execution_api/routes/__init__.py | 2 +- .../api_fastapi/execution_api/routes/hitl.py | 82 +- .../src/airflow/jobs/triggerer_job_runner.py | 30 +- ...77_3_1_0_add_human_in_the_loop_response.py | 12 +- airflow-core/src/airflow/models/__init__.py | 2 +- airflow-core/src/airflow/models/hitl.py | 8 +- .../ui/openapi-gen/requests/services.gen.ts | 1 - .../ui/openapi-gen/requests/types.gen.ts | 4 - .../core_api/routes/public/test_hitl.py | 72 +- .../execution_api/versions/head/test_hitl.py | 28 +- .../airflowctl/api/datamodels/generated.py | 36 +- .../providers/standard/operators/hitl.py | 4 +- .../providers/standard/triggers/hitl.py | 4 +- .../unit/standard/operators/test_hitl.py | 28 +- .../tests/unit/standard/triggers/test_hitl.py | 6 +- task-sdk/src/airflow/sdk/api/client.py | 30 +- .../airflow/sdk/api/datamodels/_generated.py | 18 +- .../src/airflow/sdk/execution_time/comms.py | 34 +- .../src/airflow/sdk/execution_time/hitl.py | 24 +- .../airflow/sdk/execution_time/supervisor.py | 4 +- task-sdk/tests/task_sdk/api/test_client.py | 16 +- .../task_sdk/execution_time/test_hitl.py | 26 +- .../execution_time/test_supervisor.py | 12 +- 30 files changed, 2081 insertions(+), 2099 deletions(-) diff --git a/airflow-core/docs/img/airflow_erd.sha256 b/airflow-core/docs/img/airflow_erd.sha256 index 4a770256463ab..071d8789fb485 100644 --- 
a/airflow-core/docs/img/airflow_erd.sha256 +++ b/airflow-core/docs/img/airflow_erd.sha256 @@ -1 +1 @@ -98caac0163253608fb8aa38c3fe5185070c39f3af3c0887bb433c597cb0875fe \ No newline at end of file +16689ca84560a2611b67137f8fd0e44b703c2ceaca1b1db1c1fd1c9afd03e85a \ No newline at end of file diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg index 0b725621a54f4..f960dfb5fc2fd 100644 --- a/airflow-core/docs/img/airflow_erd.svg +++ b/airflow-core/docs/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + dag_priority_parsing_request @@ -247,2082 +247,2089 @@ [INTEGER] NOT NULL - - -hitl_response - -hitl_response - -ti_id - - [UUID] - NOT NULL - -body - - [TEXT] - -default - - [JSON] - -multiple - - [BOOLEAN] - -options - - [JSON] - NOT NULL - -params - - [JSON] - NOT NULL - -params_input - - [JSON] - NOT NULL - -response_at - - [TIMESTAMP] - -response_content - - [JSON] - -subject - - [TEXT] - NOT NULL - -user_id - - [VARCHAR(128)] - - + slot_pool - -slot_pool - -id - - [INTEGER] - NOT NULL - -description - - [TEXT] - -include_deferred - - [BOOLEAN] - NOT NULL - -pool - - [VARCHAR(256)] - -slots - - [INTEGER] + +slot_pool + +id + + [INTEGER] + NOT NULL + +description + + [TEXT] + +include_deferred + + [BOOLEAN] + NOT NULL + +pool + + [VARCHAR(256)] + +slots + + [INTEGER] - + import_error - -import_error - -id - - [INTEGER] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -filename - - [VARCHAR(1024)] - -stacktrace - - [TEXT] - -timestamp - - [TIMESTAMP] + +import_error + +id + + [INTEGER] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +filename + + [VARCHAR(1024)] + +stacktrace + + [TEXT] + +timestamp + + [TIMESTAMP] - + asset_alias - -asset_alias - -id - - [INTEGER] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL + +asset_alias + +id + + [INTEGER] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL - + asset_alias_asset - -asset_alias_asset - -alias_id - 
- [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL + +asset_alias_asset + +alias_id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_alias_asset_event - -asset_alias_asset_event - -alias_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +asset_alias_asset_event + +alias_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dag_schedule_asset_alias_reference - -dag_schedule_asset_alias_reference - -alias_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_alias_reference + +alias_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset_alias--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 - + asset - -asset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_trigger - -asset_trigger - -asset_id - - [INTEGER] - NOT NULL - -trigger_id - - [INTEGER] - NOT NULL + +asset_trigger + +asset_id + + [INTEGER] + NOT NULL + +trigger_id + + [INTEGER] + NOT NULL asset--asset_trigger - -0..N -1 + +0..N +1 - + asset_active - -asset_active - -name - - [VARCHAR(1500)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset_active + +name + + [VARCHAR(1500)] + 
NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_active - -1 -1 + +1 +1 asset--asset_active - -1 -1 + +1 +1 - + dag_schedule_asset_reference - -dag_schedule_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--dag_schedule_asset_reference - -0..N -1 + +0..N +1 - + task_outlet_asset_reference - -task_outlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_outlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_outlet_asset_reference - -0..N -1 + +0..N +1 - + task_inlet_asset_reference - -task_inlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_inlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_inlet_asset_reference - -0..N -1 + +0..N +1 - + asset_dag_run_queue - -asset_dag_run_queue - -asset_id - - [INTEGER] - NOT NULL - -target_dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +asset_dag_run_queue + +asset_id + + [INTEGER] + NOT NULL + +target_dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] 
+ NOT NULL asset--asset_dag_run_queue - -0..N -1 + +0..N +1 - + asset_event - -asset_event - -id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +asset_event + +id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL asset_event--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dagrun_asset_event - -dagrun_asset_event - -dag_run_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +dagrun_asset_event + +dag_run_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_event--dagrun_asset_event - -0..N -1 + +0..N +1 - + trigger - -trigger - -id - - [INTEGER] - NOT NULL - -classpath - - [VARCHAR(1000)] - NOT NULL - -created_date - - [TIMESTAMP] - NOT NULL - -kwargs - - [TEXT] - NOT NULL - -triggerer_id - - [INTEGER] + +trigger + +id + + [INTEGER] + NOT NULL + +classpath + + [VARCHAR(1000)] + NOT NULL + +created_date + + [TIMESTAMP] + NOT NULL + +kwargs + + [TEXT] + NOT NULL + +triggerer_id + + [INTEGER] trigger--asset_trigger - -0..N -1 + +0..N +1 - + task_instance - -task_instance - -id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -last_heartbeat_at - - [TIMESTAMP] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - 
-next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance + +id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +last_heartbeat_at + + [TIMESTAMP] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + +unixname + + [VARCHAR(1000)] + +updated_at + 
+ [TIMESTAMP] trigger--task_instance - -0..N -{0,1} + +0..N +{0,1} - + task_map - -task_map - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -keys - - [JSONB] - -length - - [INTEGER] - NOT NULL + +task_map + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +keys + + [JSONB] + +length + + [INTEGER] + NOT NULL task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 - + task_reschedule - -task_reschedule - -id - - [INTEGER] - NOT NULL - -duration - - [INTEGER] - NOT NULL - -end_date - - [TIMESTAMP] - NOT NULL - -reschedule_date - - [TIMESTAMP] - NOT NULL - -start_date - - [TIMESTAMP] - NOT NULL - -ti_id - - [UUID] - NOT NULL + +task_reschedule + +id + + [INTEGER] + NOT NULL + +duration + + [INTEGER] + NOT NULL + +end_date + + [TIMESTAMP] + NOT NULL + +reschedule_date + + [TIMESTAMP] + NOT NULL + +start_date + + [TIMESTAMP] + NOT NULL + +ti_id + + [UUID] + NOT NULL task_instance--task_reschedule - -0..N -1 + +0..N +1 - + xcom - -xcom - -dag_run_id - - [INTEGER] - NOT NULL - -key - - [VARCHAR(512)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL - -value - - [JSONB] + +xcom + +dag_run_id + + [INTEGER] + NOT NULL + +key + + [VARCHAR(512)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL + +value + + [JSONB] task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - 
-0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 - + task_instance_note - -task_instance_note - -ti_id - - [UUID] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +task_instance_note + +ti_id + + [UUID] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] task_instance--task_instance_note - -1 -1 + +1 +1 - + task_instance_history - -task_instance_history - -task_instance_id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +task_instance_id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + 
+dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 + + + +hitl_detail + +hitl_detail + +ti_id + + [UUID] + NOT NULL + +body + + [TEXT] + +default + + [JSON] + +multiple + + [BOOLEAN] + +options + + [JSON] + NOT NULL + +params + + [JSON] + NOT NULL + +params_input + + [JSON] + NOT NULL + +response_at + + [TIMESTAMP] + +response_content + + [JSON] + +subject + + [TEXT] + NOT NULL + +user_id + + [VARCHAR(128)] + + + +task_instance--hitl_detail + +1 +1 rendered_task_instance_fields - -rendered_task_instance_fields - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -k8s_pod_yaml - - [JSON] - -rendered_fields - - [JSON] - NOT 
NULL - - - -task_instance--rendered_task_instance_fields - -0..N -1 + +rendered_task_instance_fields + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +k8s_pod_yaml + + [JSON] + +rendered_fields + + [JSON] + NOT NULL task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 + + + +task_instance--rendered_task_instance_fields + +0..N +1 - + dag_bundle - -dag_bundle - -name - - [VARCHAR(250)] - NOT NULL - -active - - [BOOLEAN] - -last_refreshed - - [TIMESTAMP] - -version - - [VARCHAR(200)] + +dag_bundle + +name + + [VARCHAR(250)] + NOT NULL + +active + + [BOOLEAN] + +last_refreshed + + [TIMESTAMP] + +version + + [VARCHAR(200)] - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -asset_expression - - [JSON] - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(200)] - -dag_display_name - - [VARCHAR(2000)] - -deadline - - [JSON] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] - NOT NULL - -is_paused - - [BOOLEAN] - -is_stale - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - [TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -relative_fileloc - - [VARCHAR(2000)] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +asset_expression + + [JSON] + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(200)] + 
+dag_display_name + + [VARCHAR(2000)] + +deadline + + [JSON] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_paused + + [BOOLEAN] + +is_stale + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +relative_fileloc + + [VARCHAR(2000)] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] dag_bundle--dag - -0..N -{0,1} + +0..N +{0,1} dag--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 dag--dag_schedule_asset_reference - -0..N -1 + +0..N +1 dag--task_outlet_asset_reference - -0..N -1 + +0..N +1 dag--task_inlet_asset_reference - -0..N -1 + +0..N +1 dag--asset_dag_run_queue - -0..N -1 + +0..N +1 - + dag_schedule_asset_name_reference - -dag_schedule_asset_name_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_name_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_name_reference - -0..N -1 + +0..N +1 - + dag_schedule_asset_uri_reference - -dag_schedule_asset_uri_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_uri_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL dag--dag_schedule_asset_uri_reference - -0..N -1 + +0..N +1 - + dag_version - -dag_version - -id - - [UUID] - NOT NULL - 
-bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(250)] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -version_number - - [INTEGER] - NOT NULL + +dag_version + +id + + [UUID] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(250)] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +version_number + + [INTEGER] + NOT NULL dag--dag_version - -0..N -1 + +0..N +1 - + dag_tag - -dag_tag - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +dag_tag + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes - -dag_owner_attributes - -dag_id - - [VARCHAR(250)] - NOT NULL - -owner - - [VARCHAR(500)] - NOT NULL - -link - - [VARCHAR(500)] - NOT NULL + +dag_owner_attributes + +dag_id + + [VARCHAR(250)] + NOT NULL + +owner + + [VARCHAR(500)] + NOT NULL + +link + + [VARCHAR(500)] + NOT NULL dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning - -dag_warning - -dag_id - - [VARCHAR(250)] - NOT NULL - -warning_type - - [VARCHAR(50)] - NOT NULL - -message - - [TEXT] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL + +dag_warning + +dag_id + + [VARCHAR(250)] + NOT NULL + +warning_type + + [VARCHAR(50)] + NOT NULL + +message + + [TEXT] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL dag--dag_warning - -0..N -1 + +0..N +1 - + dag_favorite - -dag_favorite - -dag_id - - [VARCHAR(250)] - NOT NULL - -user_id - - [VARCHAR(250)] - NOT NULL + +dag_favorite + +dag_id + + [VARCHAR(250)] + NOT NULL + +user_id + + [VARCHAR(250)] + NOT NULL dag--dag_favorite - -0..N -1 + +0..N +1 - + deadline - -deadline - -id - - [UUID] - NOT NULL - -callback - - [VARCHAR(500)] - NOT NULL - -callback_kwargs - - [JSON] - -dag_id - - [VARCHAR(250)] - -dagrun_id - - [INTEGER] - -deadline_time - - 
[TIMESTAMP] - NOT NULL + +deadline + +id + + [UUID] + NOT NULL + +callback + + [VARCHAR(500)] + NOT NULL + +callback_kwargs + + [JSON] + +dag_id + + [VARCHAR(250)] + +dagrun_id + + [INTEGER] + +deadline_time + + [TIMESTAMP] + NOT NULL dag--deadline - -0..N -{0,1} + +0..N +{0,1} dag_version--task_instance - -0..N -1 + +0..N +1 - + dag_run - -dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - -bundle_version - - [VARCHAR(250)] - -clear_number - - [INTEGER] - NOT NULL - -conf - - [JSONB] - -context_carrier - - [JSONB] - -created_dag_version_id - - [UUID] - -creating_job_id - - [INTEGER] - -dag_id - - [VARCHAR(250)] - NOT NULL - -data_interval_end - - [TIMESTAMP] - -data_interval_start - - [TIMESTAMP] - -end_date - - [TIMESTAMP] - -last_scheduling_decision - - [TIMESTAMP] - -log_template_id - - [INTEGER] - -logical_date - - [TIMESTAMP] - -queued_at - - [TIMESTAMP] - -run_after - - [TIMESTAMP] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -run_type - - [VARCHAR(50)] - NOT NULL - -scheduled_by_job_id - - [INTEGER] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(50)] - -triggered_by - - [VARCHAR(50)] - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] + +dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + +bundle_version + + [VARCHAR(250)] + +clear_number + + [INTEGER] + NOT NULL + +conf + + [JSONB] + +context_carrier + + [JSONB] + +created_dag_version_id + + [UUID] + +creating_job_id + + [INTEGER] + +dag_id + + [VARCHAR(250)] + NOT NULL + +data_interval_end + + [TIMESTAMP] + +data_interval_start + + [TIMESTAMP] + +end_date + + [TIMESTAMP] + +last_scheduling_decision + + [TIMESTAMP] + +log_template_id + + [INTEGER] + +logical_date + + [TIMESTAMP] + +queued_at + + [TIMESTAMP] + +run_after + + [TIMESTAMP] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +run_type + + [VARCHAR(50)] + NOT NULL + +scheduled_by_job_id + + [INTEGER] + +span_status + + 
[VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(50)] + +triggered_by + + [VARCHAR(50)] + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] dag_version--dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_code - -dag_code - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -fileloc - - [VARCHAR(2000)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -source_code - - [TEXT] - NOT NULL - -source_code_hash - - [VARCHAR(32)] - NOT NULL + +dag_code + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +fileloc + + [VARCHAR(2000)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +source_code + + [TEXT] + NOT NULL + +source_code_hash + + [VARCHAR(32)] + NOT NULL dag_version--dag_code - -0..N -1 + +0..N +1 - + serialized_dag - -serialized_dag - -id - - [UUID] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -dag_hash - - [VARCHAR(32)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - NOT NULL - -data - - [JSON] - -data_compressed - - [BYTEA] - -last_updated - - [TIMESTAMP] - NOT NULL + +serialized_dag + +id + + [UUID] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +dag_hash + + [VARCHAR(32)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + NOT NULL + +data + + [JSON] + +data_compressed + + [BYTEA] + +last_updated + + [TIMESTAMP] + NOT NULL dag_version--serialized_dag - -0..N -1 + +0..N +1 dag_run--dagrun_asset_event - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--deadline - -0..N -{0,1} + +0..N +{0,1} - + backfill_dag_run - -backfill_dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - NOT NULL - -dag_run_id - - [INTEGER] - -exception_reason - - [VARCHAR(250)] 
- -logical_date - - [TIMESTAMP] - NOT NULL - -sort_ordinal - - [INTEGER] - NOT NULL + +backfill_dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + NOT NULL + +dag_run_id + + [INTEGER] + +exception_reason + + [VARCHAR(250)] + +logical_date + + [TIMESTAMP] + NOT NULL + +sort_ordinal + + [INTEGER] + NOT NULL dag_run--backfill_dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_run_note - -dag_run_note - -dag_run_id - - [INTEGER] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +dag_run_note + +dag_run_id + + [INTEGER] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] dag_run--dag_run_note - -1 -1 + +1 +1 - + log_template - -log_template - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -elasticsearch_id - - [TEXT] - NOT NULL - -filename - - [TEXT] - NOT NULL + +log_template + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +elasticsearch_id + + [TEXT] + NOT NULL + +filename + + [TEXT] + NOT NULL log_template--dag_run - -0..N -{0,1} + +0..N +{0,1} - + backfill - -backfill - -id - - [INTEGER] - NOT NULL - -completed_at - - [TIMESTAMP] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_run_conf - - [JSON] - NOT NULL - -from_date - - [TIMESTAMP] - NOT NULL - -is_paused - - [BOOLEAN] - -max_active_runs - - [INTEGER] - NOT NULL - -reprocess_behavior - - [VARCHAR(250)] - NOT NULL - -to_date - - [TIMESTAMP] - NOT NULL - -triggering_user_name - - [VARCHAR(512)] - -updated_at - - [TIMESTAMP] - NOT NULL + +backfill + +id + + [INTEGER] + NOT NULL + +completed_at + + [TIMESTAMP] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_run_conf + + [JSON] + NOT NULL + +from_date + + [TIMESTAMP] + NOT NULL + +is_paused + + [BOOLEAN] + 
+max_active_runs + + [INTEGER] + NOT NULL + +reprocess_behavior + + [VARCHAR(250)] + NOT NULL + +to_date + + [TIMESTAMP] + NOT NULL + +triggering_user_name + + [VARCHAR(512)] + +updated_at + + [TIMESTAMP] + NOT NULL backfill--dag_run - -0..N -{0,1} + +0..N +{0,1} backfill--backfill_dag_run - -0..N -1 + +0..N +1 alembic_version - -alembic_version - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/airflow-core/docs/migrations-ref.rst b/airflow-core/docs/migrations-ref.rst index 8037d10ed06e9..0e18989fbc8a7 100644 --- a/airflow-core/docs/migrations-ref.rst +++ b/airflow-core/docs/migrations-ref.rst @@ -39,7 +39,7 @@ Here's the list of all the Database Migrations that are executed via when you ru +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | Revision ID | Revises ID | Airflow Version | Description | +=========================+==================+===================+==============================================================+ -| ``40f7c30a228b`` (head) | ``5d3072c51bac`` | ``3.1.0`` | Add Human In the Loop Response table. | +| ``40f7c30a228b`` (head) | ``5d3072c51bac`` | ``3.1.0`` | Add Human In the Loop Detail table. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``5d3072c51bac`` | ``ffdb0566c7c0`` | ``3.1.0`` | Make dag_version_id non-nullable in TaskInstance. 
| +-------------------------+------------------+-------------------+--------------------------------------------------------------+ diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py index 53ed05927b441..42878c47c872c 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -26,15 +26,15 @@ from airflow.sdk import Param -class UpdateHITLResponsePayload(BaseModel): - """Schema for updating the content of a Human-in-the-loop response.""" +class UpdateHITLDetailPayload(BaseModel): + """Schema for updating the content of a Human-in-the-loop detail.""" response_content: list[str] params_input: Mapping = Field(default_factory=dict) -class HITLResponseContentDetail(BaseModel): - """Response of updating a Human-in-the-loop response.""" +class HITLDetailResponse(BaseModel): + """Response of updating a Human-in-the-loop detail.""" user_id: str response_at: datetime @@ -42,12 +42,12 @@ class HITLResponseContentDetail(BaseModel): params_input: Mapping = Field(default_factory=dict) -class HITLResponseDetail(BaseModel): - """Schema for Human-in-the-loop response.""" +class HITLDetail(BaseModel): + """Schema for Human-in-the-loop detail.""" ti_id: str - # Input Request + # User Request Detail options: list[str] subject: str body: str | None = None @@ -70,8 +70,8 @@ def get_params(cls, params: dict[str, Any]) -> dict[str, Any]: return {k: v.dump() if isinstance(v, Param) else v for k, v in params.items()} -class HITLResponseDetailCollection(BaseModel): - """Schema for a collection of Human-in-the-loop responses.""" +class HITLDetailCollection(BaseModel): + """Schema for a collection of Human-in-the-loop details.""" - hitl_responses: list[HITLResponseDetail] + hitl_details: list[HITLDetail] total_entries: int diff --git 
a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index 9209a307bacea..f41b2ae11f375 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -7122,13 +7122,13 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /api/v2/hitl-responses/{task_instance_id}: + /api/v2/hitl-details/{task_instance_id}: patch: tags: - HumanInTheLoop - summary: Update Hitl Response - description: Update a Human-in-the-loop response. - operationId: update_hitl_response + summary: Update Hitl Detail + description: Update a Human-in-the-loop detail. + operationId: update_hitl_detail security: - OAuth2PasswordBearer: [] parameters: @@ -7144,14 +7144,14 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/UpdateHITLResponsePayload' + $ref: '#/components/schemas/UpdateHITLDetailPayload' responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/HITLResponseContentDetail' + $ref: '#/components/schemas/HITLDetailResponse' '401': content: application/json: @@ -7185,9 +7185,9 @@ paths: get: tags: - HumanInTheLoop - summary: Get Hitl Response - description: Get a Human-in-the-loop Response of a specific task instance. - operationId: get_hitl_response + summary: Get Hitl Detail + description: Get a Human-in-the-loop detail of a specific task instance. 
+ operationId: get_hitl_detail security: - OAuth2PasswordBearer: [] parameters: @@ -7204,7 +7204,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/HITLResponseDetail' + $ref: '#/components/schemas/HITLDetail' '401': content: application/json: @@ -7229,20 +7229,20 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /api/v2/hitl-responses/: + /api/v2/hitl-details/: get: tags: - HumanInTheLoop - summary: Get Hitl Responses - description: Get Human-in-the-loop Responses. - operationId: get_hitl_responses + summary: Get Hitl Details + description: Get Human-in-the-loop details. + operationId: get_hitl_details responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/HITLResponseDetailCollection' + $ref: '#/components/schemas/HITLDetailCollection' '401': description: Unauthorized content: @@ -9726,32 +9726,7 @@ components: - name title: FastAPIRootMiddlewareResponse description: Serializer for Plugin FastAPI root middleware responses. - HITLResponseContentDetail: - properties: - user_id: - type: string - title: User Id - response_at: - type: string - format: date-time - title: Response At - response_content: - items: - type: string - type: array - title: Response Content - params_input: - additionalProperties: true - type: object - title: Params Input - type: object - required: - - user_id - - response_at - - response_content - title: HITLResponseContentDetail - description: Response of updating a Human-in-the-loop response. - HITLResponseDetail: + HITLDetail: properties: ti_id: type: string @@ -9815,24 +9790,49 @@ components: - ti_id - options - subject - title: HITLResponseDetail - description: Schema for Human-in-the-loop response. - HITLResponseDetailCollection: + title: HITLDetail + description: Schema for Human-in-the-loop detail. 
+ HITLDetailCollection: properties: - hitl_responses: + hitl_details: items: - $ref: '#/components/schemas/HITLResponseDetail' + $ref: '#/components/schemas/HITLDetail' type: array - title: Hitl Responses + title: Hitl Details total_entries: type: integer title: Total Entries type: object required: - - hitl_responses + - hitl_details - total_entries - title: HITLResponseDetailCollection - description: Schema for a collection of Human-in-the-loop responses. + title: HITLDetailCollection + description: Schema for a collection of Human-in-the-loop details. + HITLDetailResponse: + properties: + user_id: + type: string + title: User Id + response_at: + type: string + format: date-time + title: Response At + response_content: + items: + type: string + type: array + title: Response Content + params_input: + additionalProperties: true + type: object + title: Params Input + type: object + required: + - user_id + - response_at + - response_content + title: HITLDetailResponse + description: Response of updating a Human-in-the-loop detail. HTTPExceptionResponse: properties: detail: @@ -11362,7 +11362,7 @@ components: - latest_triggerer_heartbeat title: TriggererInfoResponse description: Triggerer info serializer for responses. - UpdateHITLResponsePayload: + UpdateHITLDetailPayload: properties: response_content: items: @@ -11376,8 +11376,8 @@ components: type: object required: - response_content - title: UpdateHITLResponsePayload - description: Schema for updating the content of a Human-in-the-loop response. + title: UpdateHITLDetailPayload + description: Schema for updating the content of a Human-in-the-loop detail. 
ValidationError: properties: loc: diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py index 603ec7e01bf4a..1c3e259ef867f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -26,18 +26,18 @@ from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.hitl import ( - HITLResponseContentDetail, - HITLResponseDetail, - HITLResponseDetailCollection, - UpdateHITLResponsePayload, + HITLDetail, + HITLDetailCollection, + HITLDetailResponse, + UpdateHITLDetailPayload, ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag -from airflow.models.hitl import HITLResponseModel +from airflow.models.hitl import HITLDetail as HITLDetailModel from airflow.models.taskinstance import TaskInstance as TI from airflow.utils import timezone -hitl_router = AirflowRouter(tags=["HumanInTheLoop"], prefix="/hitl-responses") +hitl_router = AirflowRouter(tags=["HumanInTheLoop"], prefix="/hitl-details") log = structlog.get_logger(__name__) @@ -50,95 +50,82 @@ status.HTTP_409_CONFLICT, ] ), - dependencies=[ - Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), - ], + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], ) -def update_hitl_response( +def update_hitl_detail( task_instance_id: UUID, - update_hitl_response_payload: UpdateHITLResponsePayload, + update_hitl_detail_payload: UpdateHITLDetailPayload, user: GetUserDep, session: SessionDep, -) -> HITLResponseContentDetail: - """Update a Human-in-the-loop response.""" +) -> HITLDetailResponse: + 
"""Update a Human-in-the-loop detail.""" ti_id_str = str(task_instance_id) - hitl_response_model = session.scalar( - select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) - ) - if not hitl_response_model: + hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) + if not hitl_detail_model: raise HTTPException( status.HTTP_404_NOT_FOUND, - f"Human-in-the-loop Response does not exist for Task Instance with id {ti_id_str}", + f"Human-in-the-loop detail does not exist for Task Instance with id {ti_id_str}", ) - if hitl_response_model.response_received: + if hitl_detail_model.response_received: raise HTTPException( status.HTTP_409_CONFLICT, - f"Human-in-the-loop Response has already been updated for Task Instance with id {ti_id_str} " + f"Human-in-the-loop detail has already been updated for Task Instance with id {ti_id_str} " "and is not allowed to write again.", ) - hitl_response_model.user_id = user.get_id() - hitl_response_model.response_at = timezone.utcnow() - hitl_response_model.response_content = update_hitl_response_payload.response_content - hitl_response_model.params_input = update_hitl_response_payload.params_input - session.add(hitl_response_model) + hitl_detail_model.user_id = user.get_id() + hitl_detail_model.response_at = timezone.utcnow() + hitl_detail_model.response_content = update_hitl_detail_payload.response_content + hitl_detail_model.params_input = update_hitl_detail_payload.params_input + session.add(hitl_detail_model) session.commit() - return HITLResponseContentDetail.model_validate(hitl_response_model) + return HITLDetailResponse.model_validate(hitl_detail_model) @hitl_router.get( "/{task_instance_id}", status_code=status.HTTP_200_OK, responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), - dependencies=[ - Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), - ], + dependencies=[Depends(requires_access_dag(method="GET", 
access_entity=DagAccessEntity.TASK_INSTANCE))], ) -def get_hitl_response( +def get_hitl_detail( task_instance_id: UUID, session: SessionDep, -) -> HITLResponseDetail: - """Get a Human-in-the-loop Response of a specific task instance.""" +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" ti_id_str = str(task_instance_id) - hitl_response_model = session.scalar( - select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) - ) - if not hitl_response_model: - log.error("Human-in-the-loop response not found") + hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) + if not hitl_detail_model: + log.error("Human-in-the-loop detail not found") raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail={ "reason": "not_found", - "message": "Human-in-the-loop response not found", + "message": "Human-in-the-loop detail not found", }, ) - return HITLResponseDetail.model_validate(hitl_response_model) + return HITLDetail.model_validate(hitl_detail_model) @hitl_router.get( "/", status_code=status.HTTP_200_OK, - dependencies=[ - Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), - ], + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], ) -def get_hitl_responses( +def get_hitl_details( readable_ti_filter: ReadableTIFilterDep, session: SessionDep, -) -> HITLResponseDetailCollection: - """Get Human-in-the-loop Responses.""" - query = select(HITLResponseModel).join( - TI, - HITLResponseModel.ti_id == TI.id, - ) - hitl_response_select, total_entries = paginated_select( +) -> HITLDetailCollection: + """Get Human-in-the-loop details.""" + query = select(HITLDetailModel).join(TI, HITLDetailModel.ti_id == TI.id) + hitl_detail_select, total_entries = paginated_select( statement=query, filters=[readable_ti_filter], session=session, ) - hitl_responses = session.scalars(hitl_response_select) - return 
HITLResponseDetailCollection( - hitl_responses=hitl_responses, + hitl_details = session.scalars(hitl_detail_select) + return HITLDetailCollection( + hitl_details=hitl_details, total_entries=total_entries, ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py index 0e2eb22e0f714..5d8196a44cf5a 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py @@ -23,10 +23,11 @@ from pydantic import Field from airflow.api_fastapi.core_api.base import BaseModel +from airflow.models.hitl import HITLDetail -class HITLInputRequestResponse(BaseModel): - """Schema for the input request part of a Human-in-the-loop Response for a specific task instance.""" +class HITLDetailRequest(BaseModel): + """Schema for the request part of a Human-in-the-loop detail for a specific task instance.""" ti_id: UUID options: list[str] @@ -37,25 +38,35 @@ class HITLInputRequestResponse(BaseModel): params: dict[str, Any] = Field(default_factory=dict) -class GetHITLResponseContentDetailPayload(BaseModel): - """Schema for getting a Human-in-the-loop response content detail for a specific task instance.""" +class GetHITLDetailResponsePayload(BaseModel): + """Schema for getting the response part of a Human-in-the-loop detail for a specific task instance.""" ti_id: UUID -class UpdateHITLResponsePayload(BaseModel): - """Schema for writing a Human-in-the-loop response content detail for a specific task instance.""" +class UpdateHITLDetailPayload(BaseModel): + """Schema for writing the response part of a Human-in-the-loop detail for a specific task instance.""" ti_id: UUID response_content: list[str] params_input: dict[str, Any] = Field(default_factory=dict) -class HITLResponseContentDetail(BaseModel): - """Schema for Human-in-the-loop response content detail for a specific task instance.""" +class
HITLDetailResponse(BaseModel): + """Schema for the response part of a Human-in-the-loop detail for a specific task instance.""" response_received: bool user_id: str | None response_at: datetime | None response_content: list[str] | None params_input: dict[str, Any] = Field(default_factory=dict) + + @classmethod + def from_hitl_detail_orm(cls, hitl_detail: HITLDetail) -> HITLDetailResponse: + return HITLDetailResponse( + response_received=hitl_detail.response_received, + response_at=hitl_detail.response_at, + user_id=hitl_detail.user_id, + response_content=hitl_detail.response_content, + params_input=hitl_detail.params_input or {}, + ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py index 5d2dd78a1a1a7..ab163f0bac569 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py @@ -49,6 +49,6 @@ ) authenticated_router.include_router(variables.router, prefix="/variables", tags=["Variables"]) authenticated_router.include_router(xcoms.router, prefix="/xcoms", tags=["XComs"]) -authenticated_router.include_router(hitl.router, prefix="/hitl-responses", tags=["Human in the Loop"]) +authenticated_router.include_router(hitl.router, prefix="/hitl-details", tags=["Human in the Loop"]) execution_api_router.include_router(authenticated_router) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py index 50b8144256e62..50e469c80a9e4 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -25,11 +25,11 @@ from airflow.api_fastapi.common.db.common import SessionDep from airflow.api_fastapi.execution_api.datamodels.hitl import ( - HITLInputRequestResponse, - HITLResponseContentDetail, + 
HITLDetailRequest, + HITLDetailResponse, ) -from airflow.models.hitl import HITLResponseModel -from airflow.sdk.execution_time.comms import CreateHITLResponsePayload, UpdateHITLResponse +from airflow.models.hitl import HITLDetail +from airflow.sdk.execution_time.comms import CreateHITLDetailPayload, UpdateHITLDetail router = APIRouter() @@ -40,23 +40,21 @@ "/{task_instance_id}", status_code=status.HTTP_201_CREATED, ) -def add_hitl_response( +def add_hitl_detail( task_instance_id: UUID, - payload: CreateHITLResponsePayload, + payload: CreateHITLDetailPayload, session: SessionDep, -) -> HITLInputRequestResponse: - """Get Human-in-the-loop Response for a specific Task Instance.""" +) -> HITLDetailRequest: + """Add a Human-in-the-loop detail for a specific Task Instance.""" ti_id_str = str(task_instance_id) - hitl_response_model = session.scalar( - select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) - ) - if hitl_response_model: + hitl_detail_model = session.scalar(select(HITLDetail).where(HITLDetail.ti_id == ti_id_str)) + if hitl_detail_model: raise HTTPException( status.HTTP_409_CONFLICT, - f"Human-in-the-loop Input Request for Task Instance with id {ti_id_str} already exists.", + f"Human-in-the-loop detail for Task Instance with id {ti_id_str} already exists.", ) - hitl_input_request = HITLResponseModel( + hitl_detail = HITLDetail( ti_id=ti_id_str, options=payload.options, subject=payload.subject, @@ -65,60 +63,46 @@ def add_hitl_response( multiple=payload.multiple, params=payload.params, ) - session.add(hitl_input_request) + session.add(hitl_detail) session.commit() - return HITLInputRequestResponse.model_validate(hitl_input_request) + return HITLDetailRequest.model_validate(hitl_detail) @router.patch("/{task_instance_id}") -def update_hitl_response( +def update_hitl_detail( task_instance_id: UUID, - payload: UpdateHITLResponse, + payload: UpdateHITLDetail, session: SessionDep, -) -> HITLResponseContentDetail: - """Get Human-in-the-loop Response for 
a specific Task Instance.""" +) -> HITLDetailResponse: + """Update the response part of a Human-in-the-loop detail for a specific Task Instance.""" ti_id_str = str(task_instance_id) - hitl_response_model = session.execute( - select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) - ).scalar() - if hitl_response_model.response_received: + hitl_detail_model = session.execute(select(HITLDetail).where(HITLDetail.ti_id == ti_id_str)).scalar() + if hitl_detail_model.response_received: raise HTTPException( status.HTTP_409_CONFLICT, - f"Human-in-the-loop Response Content for Task Instance with id {ti_id_str} already exists.", + f"Human-in-the-loop detail for Task Instance with id {ti_id_str} already exists.", ) - hitl_response_model.user_id = "Fallback to default" - hitl_response_model.response_content = payload.response_content - hitl_response_model.params_input = payload.params_input - hitl_response_model.response_at = datetime.now(timezone.utc) - session.add(hitl_response_model) + hitl_detail_model.user_id = "Fallback to default" + hitl_detail_model.response_at = datetime.now(timezone.utc) + hitl_detail_model.response_content = payload.response_content + hitl_detail_model.params_input = payload.params_input + session.add(hitl_detail_model) session.commit() - return HITLResponseContentDetail( - response_received=hitl_response_model.response_received, - response_at=hitl_response_model.response_at, - user_id=hitl_response_model.user_id, - response_content=hitl_response_model.response_content, - params_input=hitl_response_model.params_input, - ) + return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model) @router.get( "/{task_instance_id}", status_code=status.HTTP_200_OK, ) -def get_hitl_response( +def get_hitl_detail( task_instance_id: UUID, session: SessionDep, -) -> HITLResponseContentDetail: - """Get Human-in-the-loop Response for a specific Task Instance.""" +) -> HITLDetailResponse: + """Get Human-in-the-loop detail for a specific Task 
Instance.""" ti_id_str = str(task_instance_id) - hitl_response_model = session.execute( - select(HITLResponseModel).where(HITLResponseModel.ti_id == ti_id_str) + hitl_detail_model = session.execute( + select(HITLDetail).where(HITLDetail.ti_id == ti_id_str), ).scalar() - return HITLResponseContentDetail( - response_received=hitl_response_model.response_received, - response_at=hitl_response_model.response_at, - user_id=hitl_response_model.user_id, - response_content=hitl_response_model.response_content, - params_input=hitl_response_model.params_input or {}, - ) + return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model) diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index 139cd3f85af9a..ba355cc57afe7 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -43,7 +43,7 @@ from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import perform_heartbeat from airflow.models.trigger import Trigger -from airflow.sdk.api.datamodels._generated import HITLResponseContentDetail +from airflow.sdk.api.datamodels._generated import HITLDetailResponse from airflow.sdk.execution_time.comms import ( CommsDecoder, ConnectionResult, @@ -53,14 +53,14 @@ GetConnection, GetDagRunState, GetDRCount, - GetHITLResponseContentDetail, + GetHITLDetailResponse, GetTaskStates, GetTICount, GetVariable, GetXCom, TaskStatesResult, TICount, - UpdateHITLResponse, + UpdateHITLDetail, VariableResult, XComResult, _RequestFrame, @@ -212,13 +212,13 @@ class TriggerStateSync(BaseModel): to_cancel: set[int] -class HITLResponseContentDetailResult(HITLResponseContentDetail): - """Response to GetHITLResponseContentDetail request.""" +class HITLDetailResponseResult(HITLDetailResponse): + """Response to GetHITLDetailResponse request.""" - type: Literal["HITLResponseContentDetailResult"] = "HITLResponseContentDetailResult" + type: 
Literal["HITLDetailResponseResult"] = "HITLDetailResponseResult" @classmethod - def from_api_response(cls, response: HITLResponseContentDetail) -> HITLResponseContentDetailResult: + def from_api_response(cls, response: HITLDetailResponse) -> HITLDetailResponseResult: """ Create result class from API Response. @@ -226,7 +226,7 @@ def from_api_response(cls, response: HITLResponseContentDetail) -> HITLResponseC for communication between the Supervisor and the task process since it needs a discriminator field. """ - return cls(**response.model_dump(exclude_defaults=True), type="HITLResponseContentDetailResult") + return cls(**response.model_dump(exclude_defaults=True), type="HITLDetailResponseResult") ToTriggerRunner = Annotated[ @@ -239,7 +239,7 @@ def from_api_response(cls, response: HITLResponseContentDetail) -> HITLResponseC | DRCount | TICount | TaskStatesResult - | HITLResponseContentDetailResult + | HITLDetailResponseResult | ErrorResponse, Field(discriminator="type"), ] @@ -258,8 +258,8 @@ def from_api_response(cls, response: HITLResponseContentDetail) -> HITLResponseC | GetTaskStates | GetDagRunState | GetDRCount - | GetHITLResponseContentDetail - | UpdateHITLResponse, + | GetHITLDetailResponse + | UpdateHITLDetail, Field(discriminator="type"), ] """ @@ -471,16 +471,16 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, r resp = TaskStatesResult.from_api_response(run_id_task_state_map) else: resp = run_id_task_state_map - elif isinstance(msg, UpdateHITLResponse): + elif isinstance(msg, UpdateHITLDetail): api_resp = self.client.hitl.update_response( ti_id=msg.ti_id, response_content=msg.response_content, params_input=msg.params_input, ) - resp = HITLResponseContentDetailResult.from_api_response(response=api_resp) - elif isinstance(msg, GetHITLResponseContentDetail): + resp = HITLDetailResponseResult.from_api_response(response=api_resp) + elif isinstance(msg, GetHITLDetailResponse): api_resp = 
self.client.hitl.get_response_content_detail(ti_id=msg.ti_id) - resp = HITLResponseContentDetailResult.from_api_response(response=api_resp) + resp = HITLDetailResponseResult.from_api_response(response=api_resp) else: raise ValueError(f"Unknown message type {type(msg)}") diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py index 16c375691b05a..a50451b2a0a89 100644 --- a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -17,7 +17,7 @@ # under the License. """ -Add Human In the Loop Response table. +Add Human In the Loop Detail table. Revision ID: 40f7c30a228b Revises: 5d3072c51bac @@ -44,9 +44,9 @@ def upgrade(): - """Add Human In the Loop Response table.""" + """Add Human In the Loop Detail table.""" op.create_table( - "hitl_response", + "hitl_detail", Column( "ti_id", String(length=36).with_variant(postgresql.UUID(), "postgresql"), @@ -66,7 +66,7 @@ def upgrade(): ForeignKeyConstraint( ["ti_id"], ["task_instance.id"], - name="hitl_response_ti_fkey", + name="hitl_detail_ti_fkey", ondelete="CASCADE", onupdate="CASCADE", ), @@ -74,5 +74,5 @@ def upgrade(): def downgrade(): - """Response Human In the Loop Response table.""" - op.drop_table("hitl_response") + """Drop Human In the Loop Detail table.""" + op.drop_table("hitl_detail") diff --git a/airflow-core/src/airflow/models/__init__.py b/airflow-core/src/airflow/models/__init__.py index 0471ef7f40c6f..ac6c2a76e3274 100644 --- a/airflow-core/src/airflow/models/__init__.py +++ b/airflow-core/src/airflow/models/__init__.py @@ -103,7 +103,7 @@ def __getattr__(name): "DbCallbackRequest": "airflow.models.db_callback_request", "Deadline": "airflow.models.deadline", "Log": "airflow.models.log", - "HITLResponseModel": "airflow.models.hitl", +
"HITLDetail": "airflow.models.hitl", "MappedOperator": "airflow.models.mappedoperator", "Operator": "airflow.models.operator", "Param": "airflow.sdk.definitions.param", diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py index cd310c8c3c18d..52d4f0022ecd9 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -26,10 +26,10 @@ from airflow.utils.sqlalchemy import UtcDateTime -class HITLResponseModel(Base): - """Human-in-the-loop received response.""" +class HITLDetail(Base): + """Human-in-the-loop request and corresponding response.""" - __tablename__ = "hitl_response" + __tablename__ = "hitl_detail" ti_id = Column( String(36).with_variant(postgresql.UUID(as_uuid=False), "postgresql"), primary_key=True, @@ -58,7 +58,7 @@ class HITLResponseModel(Base): ForeignKeyConstraint( (ti_id,), ["task_instance.id"], - name="hitl_response_ti_fkey", + name="hitl_detail_ti_fkey", ondelete="CASCADE", onupdate="CASCADE", ), diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts index 196b663c8c449..2c224d960aa43 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts @@ -3408,7 +3408,6 @@ export class HumanInTheLoopService { 401: 'Unauthorized', 403: 'Forbidden', 404: 'Not Found', - 409: 'Conflict', 422: 'Validation Error' } }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index a6dde68727068..7f11cbee8e887 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -5909,10 +5909,6 @@ export type $OpenApiTs = { * Not Found */ 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; /** * Validation Error */ diff 
--git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py index f02f32d2def57..8f6bc42aa7756 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -29,7 +29,7 @@ import time_machine from uuid6 import uuid7 -from airflow.models.hitl import HITLResponseModel +from airflow.models.hitl import HITLDetail if TYPE_CHECKING: from airflow.models.taskinstance import TaskInstance @@ -45,8 +45,8 @@ def sample_ti(create_task_instance) -> TaskInstance: @pytest.fixture -def sample_hitl_response(session, sample_ti) -> HITLResponseModel: - hitl_response_model = HITLResponseModel( +def sample_hitl_detail(session, sample_ti) -> HITLDetail: + hitl_detail_model = HITLDetail( ti_id=sample_ti.id, options=["Approve", "Reject"], subject="This is subject", @@ -55,14 +55,14 @@ def sample_hitl_response(session, sample_ti) -> HITLResponseModel: multiple=False, params={"input_1": 1}, ) - session.add(hitl_response_model) + session.add(hitl_detail_model) session.commit() - return hitl_response_model + return hitl_detail_model @pytest.fixture -def expected_sample_hitl_response_dict(sample_ti) -> dict[str, Any]: +def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: return { "body": "this is body", "default": ["Approve"], @@ -79,12 +79,12 @@ def expected_sample_hitl_response_dict(sample_ti) -> dict[str, Any]: } -class TestUpdateHITLResponseEndpoint: +class TestUpdateHITLDetailEndpoint: @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) - @pytest.mark.usefixtures("sample_hitl_response") + @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response(self, test_client, sample_ti): response = test_client.patch( - f"/hitl-responses/{sample_ti.id}", + f"/hitl-details/{sample_ti.id}", json={"response_content": ["Approve"], "params_input": 
{"input_1": 2}}, ) @@ -97,20 +97,20 @@ def test_should_respond_200_with_existing_response(self, test_client, sample_ti) } def test_should_respond_404(self, test_client, sample_ti): - response = test_client.get(f"/hitl-responses/{sample_ti.id}") + response = test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 404 assert response.json() == { "detail": { - "message": "Human-in-the-loop response not found", + "message": "Human-in-the-loop detail not found", "reason": "not_found", }, } @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) - @pytest.mark.usefixtures("sample_hitl_response") - def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_response_dict): + @pytest.mark.usefixtures("sample_hitl_detail") + def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_detail_dict): response = test_client.patch( - f"/hitl-responses/{sample_ti.id}", + f"/hitl-details/{sample_ti.id}", json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, ) @@ -124,79 +124,79 @@ def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_r assert response.json() == expected_response response = test_client.patch( - f"/hitl-responses/{sample_ti.id}", + f"/hitl-details/{sample_ti.id}", json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, ) assert response.status_code == 409 assert response.json() == { "detail": ( - "Human-in-the-loop Response has already been updated for Task Instance " + "Human-in-the-loop detail has already been updated for Task Instance " f"with id {sample_ti.id} " "and is not allowed to write again." 
) } def test_should_respond_401(self, unauthenticated_test_client, sample_ti): - response = unauthenticated_test_client.get(f"/hitl-responses/{sample_ti.id}") + response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 401 def test_should_respond_403(self, unauthorized_test_client, sample_ti): - response = unauthorized_test_client.get(f"/hitl-responses/{sample_ti.id}") + response = unauthorized_test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 403 -class TestGetHITLResponseEndpoint: - @pytest.mark.usefixtures("sample_hitl_response") +class TestGetHITLDetailEndpoint: + @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response( - self, test_client, sample_ti, expected_sample_hitl_response_dict + self, test_client, sample_ti, expected_sample_hitl_detail_dict ): - response = test_client.get(f"/hitl-responses/{sample_ti.id}") + response = test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 200 - assert response.json() == expected_sample_hitl_response_dict + assert response.json() == expected_sample_hitl_detail_dict def test_should_respond_404(self, test_client, sample_ti): - response = test_client.get(f"/hitl-responses/{sample_ti.id}") + response = test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 404 assert response.json() == { "detail": { - "message": "Human-in-the-loop response not found", + "message": "Human-in-the-loop detail not found", "reason": "not_found", }, } def test_should_respond_401(self, unauthenticated_test_client, sample_ti): - response = unauthenticated_test_client.get(f"/hitl-responses/{sample_ti.id}") + response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 401 def test_should_respond_403(self, unauthorized_test_client, sample_ti): - response = unauthorized_test_client.get(f"/hitl-responses/{sample_ti.id}") + response = 
unauthorized_test_client.get(f"/hitl-details/{sample_ti.id}") assert response.status_code == 403 -class TestGetHITLResponsesEndpoint: - @pytest.mark.usefixtures("sample_hitl_response") +class TestGetHITLDetailsEndpoint: + @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response( - self, test_client, sample_ti, expected_sample_hitl_response_dict + self, test_client, sample_ti, expected_sample_hitl_detail_dict ): - response = test_client.get("/hitl-responses/") + response = test_client.get("/hitl-details/") assert response.status_code == 200 assert response.json() == { - "hitl_responses": [expected_sample_hitl_response_dict], + "hitl_details": [expected_sample_hitl_detail_dict], "total_entries": 1, } def test_should_respond_200_without_response(self, test_client): - response = test_client.get("/hitl-responses/") + response = test_client.get("/hitl-details/") assert response.status_code == 200 assert response.json() == { - "hitl_responses": [], + "hitl_details": [], "total_entries": 0, } def test_should_respond_401(self, unauthenticated_test_client): - response = unauthenticated_test_client.get("/hitl-responses/") + response = unauthenticated_test_client.get("/hitl-details/") assert response.status_code == 401 def test_should_respond_403(self, unauthorized_test_client): - response = unauthorized_test_client.get("/hitl-responses/") + response = unauthorized_test_client.get("/hitl-details/") assert response.status_code == 403 diff --git a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py index d4fcba9879cd1..3266faa98ba60 100644 --- a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py @@ -29,7 +29,7 @@ from typing import TYPE_CHECKING, Any -from airflow.models.hitl import HITLResponseModel +from airflow.models.hitl 
import HITLDetail if TYPE_CHECKING: from airflow.models.taskinstance import TaskInstance @@ -44,8 +44,8 @@ def sample_ti(create_task_instance) -> TaskInstance: @pytest.fixture -def sample_hitl_response(session, sample_ti) -> HITLResponseModel: - hitl_response_model = HITLResponseModel( +def sample_hitl_detail(session, sample_ti) -> HITLDetail: + hitl_detail_model = HITLDetail( ti_id=sample_ti.id, options=["Approve", "Reject"], subject="This is subject", @@ -54,14 +54,14 @@ def sample_hitl_response(session, sample_ti) -> HITLResponseModel: multiple=False, params={"input_1": 1}, ) - session.add(hitl_response_model) + session.add(hitl_detail_model) session.commit() - return hitl_response_model + return hitl_detail_model @pytest.fixture -def expected_sample_hitl_response_dict(sample_ti) -> dict[str, Any]: +def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: return { "body": "this is body", "default": ["Approve"], @@ -78,12 +78,12 @@ def expected_sample_hitl_response_dict(sample_ti) -> dict[str, Any]: } -def test_add_hitl_response(client, create_task_instance, session) -> None: +def test_add_hitl_detail(client, create_task_instance, session) -> None: ti = create_task_instance() session.commit() response = client.post( - f"/execution/hitl-responses/{ti.id}", + f"/execution/hitl-details/{ti.id}", json={ "ti_id": ti.id, "options": ["Approve", "Reject"], @@ -107,10 +107,10 @@ def test_add_hitl_response(client, create_task_instance, session) -> None: @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) -@pytest.mark.usefixtures("sample_hitl_response") -def test_update_hitl_response(client, sample_ti) -> None: +@pytest.mark.usefixtures("sample_hitl_detail") +def test_update_hitl_detail(client, sample_ti) -> None: response = client.patch( - f"/execution/hitl-responses/{sample_ti.id}", + f"/execution/hitl-details/{sample_ti.id}", json={ "ti_id": sample_ti.id, "response_content": ["Reject"], @@ -127,9 +127,9 @@ def test_update_hitl_response(client, 
sample_ti) -> None: } -@pytest.mark.usefixtures("sample_hitl_response") -def test_get_hitl_response(client, sample_ti) -> None: - response = client.get(f"/execution/hitl-responses/{sample_ti.id}") +@pytest.mark.usefixtures("sample_hitl_detail") +def test_get_hitl_detail(client, sample_ti) -> None: + response = client.get(f"/execution/hitl-details/{sample_ti.id}") assert response.status_code == 200 assert response.json() == { "params_input": {}, diff --git a/airflow-ctl/src/airflowctl/api/datamodels/generated.py b/airflow-ctl/src/airflowctl/api/datamodels/generated.py index d4cdd71200d92..9dd55fac264e4 100644 --- a/airflow-ctl/src/airflowctl/api/datamodels/generated.py +++ b/airflow-ctl/src/airflowctl/api/datamodels/generated.py @@ -573,20 +573,9 @@ class FastAPIRootMiddlewareResponse(BaseModel): name: Annotated[str, Field(title="Name")] -class HITLResponseContentDetail(BaseModel): +class HITLDetail(BaseModel): """ - Response of updating a Human-in-the-loop response. - """ - - user_id: Annotated[str, Field(title="User Id")] - response_at: Annotated[datetime, Field(title="Response At")] - response_content: Annotated[list[str], Field(title="Response Content")] - params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None - - -class HITLResponseDetail(BaseModel): - """ - Schema for Human-in-the-loop response. + Schema for Human-in-the-loop detail. """ ti_id: Annotated[str, Field(title="Ti Id")] @@ -603,15 +592,26 @@ class HITLResponseDetail(BaseModel): response_received: Annotated[bool | None, Field(title="Response Received")] = False -class HITLResponseDetailCollection(BaseModel): +class HITLDetailCollection(BaseModel): """ - Schema for a collection of Human-in-the-loop responses. + Schema for a collection of Human-in-the-loop details. 
""" - hitl_responses: Annotated[list[HITLResponseDetail], Field(title="Hitl Responses")] + hitl_details: Annotated[list[HITLDetail], Field(title="Hitl Details")] total_entries: Annotated[int, Field(title="Total Entries")] +class HITLDetailResponse(BaseModel): + """ + Response of updating a Human-in-the-loop detail. + """ + + user_id: Annotated[str, Field(title="User Id")] + response_at: Annotated[datetime, Field(title="Response At")] + response_content: Annotated[list[str], Field(title="Response Content")] + params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None + + class HTTPExceptionResponse(BaseModel): """ HTTPException Model used for error response. @@ -938,9 +938,9 @@ class TriggererInfoResponse(BaseModel): latest_triggerer_heartbeat: Annotated[str | None, Field(title="Latest Triggerer Heartbeat")] = None -class UpdateHITLResponsePayload(BaseModel): +class UpdateHITLDetailPayload(BaseModel): """ - Schema for updating the content of a Human-in-the-loop response. + Schema for updating the content of a Human-in-the-loop detail. 
""" response_content: Annotated[list[str], Field(title="Response Content")] diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 647ddfc636960..11dda9e2d3982 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -32,7 +32,7 @@ from airflow.providers.standard.exceptions import HITLTriggerEventError from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload from airflow.sdk.definitions.param import ParamsDict -from airflow.sdk.execution_time.hitl import add_hitl_response +from airflow.sdk.execution_time.hitl import add_hitl_detail if TYPE_CHECKING: from airflow.sdk.definitions.context import Context @@ -100,7 +100,7 @@ def execute(self, context: Context): """Add a Human-in-the-loop Response and then defer to HITLTrigger and wait for user input.""" ti_id = context["task_instance"].id # Write Human-in-the-loop input request to DB - add_hitl_response( + add_hitl_detail( ti_id=ti_id, options=self.options, subject=self.subject, diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py index fe04b9614e7cb..23df73355e98c 100644 --- a/providers/standard/src/airflow/providers/standard/triggers/hitl.py +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -31,7 +31,7 @@ from asgiref.sync import sync_to_async from airflow.sdk.execution_time.hitl import ( - get_hitl_response_content_detail, + get_hitl_detail_content_detail, update_htil_response_content_detail, ) from airflow.triggers.base import BaseTrigger, TriggerEvent @@ -118,7 +118,7 @@ async def run(self) -> AsyncIterator[TriggerEvent]: ) return - resp = await sync_to_async(get_hitl_response_content_detail)(ti_id=self.ti_id) + resp = await 
sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id) if resp.response_received and resp.response_content: self.log.info("Responded by %s at %s", resp.user_id, resp.response_at) yield TriggerEvent( diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index fcc9486231531..f146eb7bf71cd 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -27,7 +27,7 @@ from airflow.exceptions import DownstreamTasksSkipped from airflow.models import Trigger -from airflow.models.hitl import HITLResponseModel +from airflow.models.hitl import HITLDetail from airflow.providers.standard.operators.hitl import ( ApprovalOperator, HITLEntryOperator, @@ -87,20 +87,18 @@ def test_execute(self, dag_maker, session) -> None: dr = dag_maker.create_dagrun() ti = dag_maker.run_ti(task.task_id, dr) - hitl_response_model = session.scalar( - select(HITLResponseModel).where(HITLResponseModel.ti_id == ti.id) - ) - assert hitl_response_model.ti_id == ti.id - assert hitl_response_model.subject == "This is subject" - assert hitl_response_model.options == ["1", "2", "3", "4", "5"] - assert hitl_response_model.body == "This is body" - assert hitl_response_model.default == ["1"] - assert hitl_response_model.multiple is False - assert hitl_response_model.params == {"input_1": 1} - assert hitl_response_model.response_at is None - assert hitl_response_model.user_id is None - assert hitl_response_model.response_content is None - assert hitl_response_model.params_input == {} + hitl_detail_model = session.scalar(select(HITLDetail).where(HITLDetail.ti_id == ti.id)) + assert hitl_detail_model.ti_id == ti.id + assert hitl_detail_model.subject == "This is subject" + assert hitl_detail_model.options == ["1", "2", "3", "4", "5"] + assert hitl_detail_model.body == "This is body" + assert hitl_detail_model.default == ["1"] + assert 
hitl_detail_model.multiple is False + assert hitl_detail_model.params == {"input_1": 1} + assert hitl_detail_model.response_at is None + assert hitl_detail_model.user_id is None + assert hitl_detail_model.response_content is None + assert hitl_detail_model.params_input == {} registered_trigger = session.scalar( select(Trigger).where(Trigger.classpath == "airflow.providers.standard.triggers.hitl.HITLTrigger") diff --git a/providers/standard/tests/unit/standard/triggers/test_hitl.py b/providers/standard/tests/unit/standard/triggers/test_hitl.py index cb9b58c87f905..009e90d163f72 100644 --- a/providers/standard/tests/unit/standard/triggers/test_hitl.py +++ b/providers/standard/tests/unit/standard/triggers/test_hitl.py @@ -30,7 +30,7 @@ from uuid6 import uuid7 -from airflow.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail +from airflow.api_fastapi.execution_api.datamodels.hitl import HITLDetailResponse from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload from airflow.triggers.base import TriggerEvent from airflow.utils.timezone import utcnow @@ -74,7 +74,7 @@ async def test_run_fallback_to_default_due_to_timeout(self, mock_update, mock_su timeout_datetime=utcnow() + timedelta(seconds=0.1), poke_interval=5, ) - mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + mock_supervisor_comms.send.return_value = HITLDetailResponse( response_received=False, user_id=None, response_at=None, @@ -106,7 +106,7 @@ async def test_run(self, mock_update, mock_supervisor_comms): timeout_datetime=None, poke_interval=5, ) - mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + mock_supervisor_comms.send.return_value = HITLDetailResponse( response_received=True, user_id="test", response_at=utcnow(), diff --git a/task-sdk/src/airflow/sdk/api/client.py b/task-sdk/src/airflow/sdk/api/client.py index 0bce11b064393..70ce783853d60 100644 --- a/task-sdk/src/airflow/sdk/api/client.py +++ 
b/task-sdk/src/airflow/sdk/api/client.py @@ -38,10 +38,10 @@ AssetEventsResponse, AssetResponse, ConnectionResponse, - CreateHITLResponsePayload, + CreateHITLDetailPayload, DagRunStateResponse, DagRunType, - HITLResponseContentDetail, + HITLDetailResponse, InactiveAssetsResponse, PrevSuccessfulDagRunResponse, TaskInstanceState, @@ -57,7 +57,7 @@ TISuccessStatePayload, TITerminalStatePayload, TriggerDAGRunPayload, - UpdateHITLResponse, + UpdateHITLDetail, ValidationError as RemoteValidationError, VariablePostBody, VariableResponse, @@ -69,7 +69,7 @@ from airflow.sdk.execution_time.comms import ( DRCount, ErrorResponse, - HITLInputRequestResponseResult, + HITLDetailRequestResult, OKResponse, SkipDownstreamTasks, TaskRescheduleStartDate, @@ -644,9 +644,9 @@ def add_response( default: list[str] | None = None, multiple: bool = False, params: dict[str, Any] | None = None, - ) -> HITLInputRequestResponseResult: + ) -> HITLDetailRequestResult: """Add a Human-in-the-loop response that waits for human response for a specific Task Instance.""" - payload = CreateHITLResponsePayload( + payload = CreateHITLDetailPayload( ti_id=ti_id, options=options, subject=subject, @@ -656,10 +656,10 @@ def add_response( params=params, ) resp = self.client.post( - f"/hitl-responses/{ti_id}", + f"/hitl-details/{ti_id}", content=payload.model_dump_json(), ) - return HITLInputRequestResponseResult.model_validate_json(resp.read()) + return HITLDetailRequestResult.model_validate_json(resp.read()) def update_response( self, @@ -667,23 +667,23 @@ def update_response( ti_id: uuid.UUID, response_content: list[str], params_input: dict[str, Any], - ) -> HITLResponseContentDetail: + ) -> HITLDetailResponse: """Update an existing Human-in-the-loop response.""" - payload = UpdateHITLResponse( + payload = UpdateHITLDetail( ti_id=ti_id, response_content=response_content, params_input=params_input, ) resp = self.client.patch( - f"/hitl-responses/{ti_id}", + f"/hitl-details/{ti_id}", 
content=payload.model_dump_json(), ) - return HITLResponseContentDetail.model_validate_json(resp.read()) + return HITLDetailResponse.model_validate_json(resp.read()) - def get_response_content_detail(self, ti_id: uuid.UUID) -> HITLResponseContentDetail: + def get_response_content_detail(self, ti_id: uuid.UUID) -> HITLDetailResponse: """Get content part of a Human-in-the-loop response for a specific Task Instance.""" - resp = self.client.get(f"/hitl-responses/{ti_id}") - return HITLResponseContentDetail.model_validate_json(resp.read()) + resp = self.client.get(f"/hitl-details/{ti_id}") + return HITLDetailResponse.model_validate_json(resp.read()) class BearerAuth(httpx.Auth): diff --git a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py index cebdfec87ee84..541bb1397140b 100644 --- a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py +++ b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py @@ -102,7 +102,7 @@ class ConnectionResponse(BaseModel): extra: Annotated[str | None, Field(title="Extra")] = None -class CreateHITLResponsePayload(BaseModel): +class CreateHITLDetailPayload(BaseModel): """ Add the input request part of a Human-in-the-loop response. 
""" @@ -114,8 +114,8 @@ class CreateHITLResponsePayload(BaseModel): default: Annotated[list[str] | None, Field(title="Default")] = None multiple: Annotated[bool | None, Field(title="Multiple")] = False params: Annotated[dict[str, Any] | None, Field(title="Params")] = None - type: Annotated[Literal["CreateHITLResponsePayload"] | None, Field(title="Type")] = ( - "CreateHITLResponsePayload" + type: Annotated[Literal["CreateHITLDetailPayload"] | None, Field(title="Type")] = ( + "CreateHITLDetailPayload" ) @@ -171,9 +171,9 @@ class DagRunType(str, Enum): ASSET_TRIGGERED = "asset_triggered" -class HITLInputRequestResponse(BaseModel): +class HITLDetailRequest(BaseModel): """ - Schema for the input request part of a Human-in-the-loop Response for a specific task instance. + Schema for the request part of a Human-in-the-loop detail for a specific task instance. """ ti_id: Annotated[UUID, Field(title="Ti Id")] @@ -185,9 +185,9 @@ class HITLInputRequestResponse(BaseModel): params: Annotated[dict[str, Any] | None, Field(title="Params")] = None -class HITLResponseContentDetail(BaseModel): +class HITLDetailResponse(BaseModel): """ - Schema for Human-in-the-loop response content detail for a specific task instance. + Schema for the response part of a Human-in-the-loop detail for a specific task instance. """ response_received: Annotated[bool, Field(title="Response Received")] @@ -368,7 +368,7 @@ class TriggerDAGRunPayload(BaseModel): reset_dag_run: Annotated[bool | None, Field(title="Reset Dag Run")] = False -class UpdateHITLResponse(BaseModel): +class UpdateHITLDetail(BaseModel): """ Update the response content part of an existing Human-in-the-loop response. 
""" @@ -376,7 +376,7 @@ class UpdateHITLResponse(BaseModel): ti_id: Annotated[UUID, Field(title="Ti Id")] response_content: Annotated[list[str], Field(title="Response Content")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None - type: Annotated[Literal["UpdateHITLResponse"] | None, Field(title="Type")] = "UpdateHITLResponse" + type: Annotated[Literal["UpdateHITLDetail"] | None, Field(title="Type")] = "UpdateHITLDetail" class ValidationError(BaseModel): diff --git a/task-sdk/src/airflow/sdk/execution_time/comms.py b/task-sdk/src/airflow/sdk/execution_time/comms.py index f2ece3997151d..fb9bdbaf6ba61 100644 --- a/task-sdk/src/airflow/sdk/execution_time/comms.py +++ b/task-sdk/src/airflow/sdk/execution_time/comms.py @@ -64,8 +64,8 @@ from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, JsonValue, TypeAdapter, field_serializer from airflow.api_fastapi.execution_api.datamodels.hitl import ( - GetHITLResponseContentDetailPayload, - UpdateHITLResponsePayload, + GetHITLDetailResponsePayload, + UpdateHITLDetailPayload, ) from airflow.sdk.api.datamodels._generated import ( AssetEventDagRunReference, @@ -75,7 +75,7 @@ BundleInfo, ConnectionResponse, DagRunStateResponse, - HITLInputRequestResponse, + HITLDetailRequest, InactiveAssetsResponse, PrevSuccessfulDagRunResponse, TaskInstance, @@ -563,16 +563,16 @@ class SentFDs(BaseModel): fds: list[int] -class CreateHITLResponsePayload(HITLInputRequestResponse): +class CreateHITLDetailPayload(HITLDetailRequest): """Add the input request part of a Human-in-the-loop response.""" - type: Literal["CreateHITLResponsePayload"] = "CreateHITLResponsePayload" + type: Literal["CreateHITLDetailPayload"] = "CreateHITLDetailPayload" -class HITLInputRequestResponseResult(HITLInputRequestResponse): - """Response to CreateHITLResponsePayload request.""" +class HITLDetailRequestResult(HITLDetailRequest): + """Response to CreateHITLDetailPayload request.""" - type: 
Literal["HITLInputRequestResponseResult"] = "HITLInputRequestResponseResult" + type: Literal["HITLDetailRequestResult"] = "HITLDetailRequestResult" ToTask = Annotated[ @@ -594,8 +594,8 @@ class HITLInputRequestResponseResult(HITLInputRequestResponse): | XComSequenceIndexResult | XComSequenceSliceResult | InactiveAssetsResult - | CreateHITLResponsePayload - | HITLInputRequestResponseResult + | CreateHITLDetailPayload + | HITLDetailRequestResult | OKResponse, Field(discriminator="type"), ] @@ -858,16 +858,16 @@ class GetDRCount(BaseModel): type: Literal["GetDRCount"] = "GetDRCount" -class GetHITLResponseContentDetail(GetHITLResponseContentDetailPayload): +class GetHITLDetailResponse(GetHITLDetailResponsePayload): """Get the response content part of a Human-in-the-loop response.""" - type: Literal["GetHITLResponseContentDetail"] = "GetHITLResponseContentDetail" + type: Literal["GetHITLDetailResponse"] = "GetHITLDetailResponse" -class UpdateHITLResponse(UpdateHITLResponsePayload): +class UpdateHITLDetail(UpdateHITLDetailPayload): """Update the response content part of an existing Human-in-the-loop response.""" - type: Literal["UpdateHITLResponse"] = "UpdateHITLResponse" + type: Literal["UpdateHITLDetail"] = "UpdateHITLDetail" ToSupervisor = Annotated[ @@ -901,8 +901,8 @@ class UpdateHITLResponse(UpdateHITLResponsePayload): | TriggerDagRun | DeleteVariable | ResendLoggingFD - | CreateHITLResponsePayload - | UpdateHITLResponse - | GetHITLResponseContentDetail, + | CreateHITLDetailPayload + | UpdateHITLDetail + | GetHITLDetailResponse, Field(discriminator="type"), ] diff --git a/task-sdk/src/airflow/sdk/execution_time/hitl.py b/task-sdk/src/airflow/sdk/execution_time/hitl.py index da46da43a9756..f15ffb218320b 100644 --- a/task-sdk/src/airflow/sdk/execution_time/hitl.py +++ b/task-sdk/src/airflow/sdk/execution_time/hitl.py @@ -21,16 +21,16 @@ from uuid import UUID from airflow.sdk.execution_time.comms import ( - CreateHITLResponsePayload, - GetHITLResponseContentDetail, - 
UpdateHITLResponse, + CreateHITLDetailPayload, + GetHITLDetailResponse, + UpdateHITLDetail, ) if TYPE_CHECKING: - from airflow.api_fastapi.execution_api.datamodels.hitl import HITLResponseContentDetail + from airflow.api_fastapi.execution_api.datamodels.hitl import HITLDetailResponse -def add_hitl_response( +def add_hitl_detail( ti_id: UUID, options: list[str], subject: str, @@ -42,7 +42,7 @@ def add_hitl_response( from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS SUPERVISOR_COMMS.send( - msg=CreateHITLResponsePayload( + msg=CreateHITLDetailPayload( ti_id=ti_id, options=options, subject=subject, @@ -58,26 +58,26 @@ def update_htil_response_content_detail( ti_id: UUID, response_content: list[str], params_input: dict[str, Any], -) -> HITLResponseContentDetail: +) -> HITLDetailResponse: from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS response = SUPERVISOR_COMMS.send( - msg=UpdateHITLResponse( + msg=UpdateHITLDetail( ti_id=ti_id, response_content=response_content, params_input=params_input, ), ) if TYPE_CHECKING: - assert isinstance(response, HITLResponseContentDetail) + assert isinstance(response, HITLDetailResponse) return response -def get_hitl_response_content_detail(ti_id: UUID) -> HITLResponseContentDetail: +def get_hitl_detail_content_detail(ti_id: UUID) -> HITLDetailResponse: from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS - response = SUPERVISOR_COMMS.send(msg=GetHITLResponseContentDetail(ti_id=ti_id)) + response = SUPERVISOR_COMMS.send(msg=GetHITLDetailResponse(ti_id=ti_id)) if TYPE_CHECKING: - assert isinstance(response, HITLResponseContentDetail) + assert isinstance(response, HITLDetailResponse) return response diff --git a/task-sdk/src/airflow/sdk/execution_time/supervisor.py b/task-sdk/src/airflow/sdk/execution_time/supervisor.py index 1c466c9f9a8f5..9bb8394554529 100644 --- a/task-sdk/src/airflow/sdk/execution_time/supervisor.py +++ b/task-sdk/src/airflow/sdk/execution_time/supervisor.py @@ -68,7 
+68,7 @@ AssetEventsResult, AssetResult, ConnectionResult, - CreateHITLResponsePayload, + CreateHITLDetailPayload, DagRunStateResult, DeferTask, DeleteVariable, @@ -1231,7 +1231,7 @@ def _handle_request(self, msg: ToSupervisor, log: FilteringBoundLogger, req_id: self._send_new_log_fd(req_id) # Since we've sent the message, return. Nothing else in this ifelse/switch should return directly return - elif isinstance(msg, CreateHITLResponsePayload): + elif isinstance(msg, CreateHITLDetailPayload): resp = self.client.hitl.add_response( ti_id=msg.ti_id, options=msg.options, diff --git a/task-sdk/tests/task_sdk/api/test_client.py b/task-sdk/tests/task_sdk/api/test_client.py index 1aa55f9c5e2ef..ff65a85415770 100644 --- a/task-sdk/tests/task_sdk/api/test_client.py +++ b/task-sdk/tests/task_sdk/api/test_client.py @@ -36,7 +36,7 @@ ConnectionResponse, DagRunState, DagRunStateResponse, - HITLResponseContentDetail, + HITLDetailResponse, VariableResponse, XComResponse, ) @@ -44,7 +44,7 @@ from airflow.sdk.execution_time.comms import ( DeferTask, ErrorResponse, - HITLInputRequestResponseResult, + HITLDetailRequestResult, OKResponse, RescheduleTask, TaskRescheduleStartDate, @@ -1165,7 +1165,7 @@ def test_add_response(self) -> None: ti_id = uuid7() def handle_request(request: httpx.Request) -> httpx.Response: - if request.url.path in (f"/hitl-responses/{ti_id}"): + if request.url.path in (f"/hitl-details/{ti_id}"): return httpx.Response( status_code=201, json={ @@ -1190,7 +1190,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: params=None, multiple=False, ) - assert isinstance(result, HITLInputRequestResponseResult) + assert isinstance(result, HITLDetailRequestResult) assert result.ti_id == ti_id assert result.options == ["Approval", "Reject"] assert result.subject == "This is subject" @@ -1204,7 +1204,7 @@ def test_update_response(self, time_machine: TimeMachineFixture) -> None: ti_id = uuid7() def handle_request(request: httpx.Request) -> httpx.Response: - if 
request.url.path in (f"/hitl-responses/{ti_id}"): + if request.url.path in (f"/hitl-details/{ti_id}"): return httpx.Response( status_code=200, json={ @@ -1223,7 +1223,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: response_content=["Approve"], params_input={}, ) - assert isinstance(result, HITLResponseContentDetail) + assert isinstance(result, HITLDetailResponse) assert result.response_received is True assert result.response_content == ["Approval"] assert result.params_input == {} @@ -1235,7 +1235,7 @@ def test_get_response_content_detail(self, time_machine: TimeMachineFixture) -> ti_id = uuid7() def handle_request(request: httpx.Request) -> httpx.Response: - if request.url.path in (f"/hitl-responses/{ti_id}"): + if request.url.path in (f"/hitl-details/{ti_id}"): return httpx.Response( status_code=200, json={ @@ -1250,7 +1250,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: client = make_client(transport=httpx.MockTransport(handle_request)) result = client.hitl.get_response_content_detail(ti_id=ti_id) - assert isinstance(result, HITLResponseContentDetail) + assert isinstance(result, HITLDetailResponse) assert result.response_received is True assert result.response_content == ["Approval"] assert result.params_input == {} diff --git a/task-sdk/tests/task_sdk/execution_time/test_hitl.py b/task-sdk/tests/task_sdk/execution_time/test_hitl.py index 2ff161969912f..e4abf9004254b 100644 --- a/task-sdk/tests/task_sdk/execution_time/test_hitl.py +++ b/task-sdk/tests/task_sdk/execution_time/test_hitl.py @@ -19,11 +19,11 @@ from uuid6 import uuid7 -from airflow.sdk.api.datamodels._generated import HITLResponseContentDetail -from airflow.sdk.execution_time.comms import CreateHITLResponsePayload +from airflow.sdk.api.datamodels._generated import HITLDetailResponse +from airflow.sdk.execution_time.comms import CreateHITLDetailPayload from airflow.sdk.execution_time.hitl import ( - add_hitl_response, - get_hitl_response_content_detail, + 
add_hitl_detail, + get_hitl_detail_content_detail, update_htil_response_content_detail, ) from airflow.utils import timezone @@ -31,8 +31,8 @@ TI_ID = uuid7() -def test_add_hitl_response(mock_supervisor_comms) -> None: - add_hitl_response( +def test_add_hitl_detail(mock_supervisor_comms) -> None: + add_hitl_detail( ti_id=TI_ID, options=["Approve", "Reject"], subject="Subject", @@ -42,7 +42,7 @@ def test_add_hitl_response(mock_supervisor_comms) -> None: multiple=False, ) mock_supervisor_comms.send.assert_called_with( - msg=CreateHITLResponsePayload( + msg=CreateHITLDetailPayload( ti_id=TI_ID, options=["Approve", "Reject"], subject="Subject", @@ -56,7 +56,7 @@ def test_add_hitl_response(mock_supervisor_comms) -> None: def test_update_htil_response_content_detail(mock_supervisor_comms) -> None: timestamp = timezone.utcnow() - mock_supervisor_comms.send.return_value = HITLResponseContentDetail( + mock_supervisor_comms.send.return_value = HITLDetailResponse( response_received=True, response_content=["Approve"], response_at=timestamp, @@ -68,7 +68,7 @@ def test_update_htil_response_content_detail(mock_supervisor_comms) -> None: response_content=["Approve"], params_input={"input_1": 1}, ) - assert resp == HITLResponseContentDetail( + assert resp == HITLDetailResponse( response_received=True, response_content=["Approve"], response_at=timestamp, @@ -77,16 +77,16 @@ def test_update_htil_response_content_detail(mock_supervisor_comms) -> None: ) -def test_get_hitl_response_content_detail(mock_supervisor_comms) -> None: - mock_supervisor_comms.send.return_value = HITLResponseContentDetail( +def test_get_hitl_detail_content_detail(mock_supervisor_comms) -> None: + mock_supervisor_comms.send.return_value = HITLDetailResponse( response_received=False, response_content=None, response_at=None, user_id=None, params_input={}, ) - resp = get_hitl_response_content_detail(TI_ID) - assert resp == HITLResponseContentDetail( + resp = get_hitl_detail_content_detail(TI_ID) + assert resp == 
HITLDetailResponse( response_received=False, response_content=None, response_at=None, diff --git a/task-sdk/tests/task_sdk/execution_time/test_supervisor.py b/task-sdk/tests/task_sdk/execution_time/test_supervisor.py index 8a045f8a701f8..7983617e31158 100644 --- a/task-sdk/tests/task_sdk/execution_time/test_supervisor.py +++ b/task-sdk/tests/task_sdk/execution_time/test_supervisor.py @@ -60,7 +60,7 @@ AssetResult, CommsDecoder, ConnectionResult, - CreateHITLResponsePayload, + CreateHITLDetailPayload, DagRunStateResult, DeferTask, DeleteVariable, @@ -82,7 +82,7 @@ GetXCom, GetXComSequenceItem, GetXComSequenceSlice, - HITLInputRequestResponseResult, + HITLDetailRequestResult, InactiveAssetsResult, OKResponse, PrevSuccessfulDagRunResult, @@ -1773,7 +1773,7 @@ def watched_subprocess(self, mocker): id="get_xcom_seq_slice", ), pytest.param( - CreateHITLResponsePayload( + CreateHITLDetailPayload( ti_id=TI_ID, options=["Approve", "Reject"], subject="This is subject", @@ -1790,7 +1790,7 @@ def watched_subprocess(self, mocker): "default": ["Approve"], "multiple": False, "params": {}, - "type": "HITLInputRequestResponseResult", + "type": "HITLDetailRequestResult", }, "hitl.add_response", (), @@ -1803,7 +1803,7 @@ def watched_subprocess(self, mocker): "subject": "This is subject", "ti_id": TI_ID, }, - HITLInputRequestResponseResult( + HITLDetailRequestResult( ti_id=TI_ID, options=["Approve", "Reject"], subject="This is subject", @@ -1813,7 +1813,7 @@ def watched_subprocess(self, mocker): params={}, ), None, - id="create_hitl_response_payload", + id="create_hitl_detail_payload", ), ], ) From 214411434c6a0ec16501570f9df1c59e21f6e59b Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 17:58:16 +0800 Subject: [PATCH 24/30] feat(hitl): rename response_content as chosen_options --- .../api_fastapi/core_api/datamodels/hitl.py | 6 +- .../core_api/routes/public/hitl.py | 2 +- .../execution_api/datamodels/hitl.py | 6 +- .../api_fastapi/execution_api/routes/hitl.py | 2 +- 
.../src/airflow/jobs/triggerer_job_runner.py | 4 +- ...77_3_1_0_add_human_in_the_loop_response.py | 2 +- airflow-core/src/airflow/models/hitl.py | 2 +- .../airflow/ui/openapi-gen/queries/common.ts | 20 ++--- .../ui/openapi-gen/queries/ensureQueryData.ts | 18 ++-- .../ui/openapi-gen/queries/prefetch.ts | 18 ++-- .../airflow/ui/openapi-gen/queries/queries.ts | 34 ++++---- .../ui/openapi-gen/queries/suspense.ts | 18 ++-- .../ui/openapi-gen/requests/schemas.gen.ts | 86 +++++++++---------- .../ui/openapi-gen/requests/services.gen.ts | 32 +++---- .../ui/openapi-gen/requests/types.gen.ts | 64 +++++++------- .../core_api/routes/public/test_hitl.py | 12 +-- .../execution_api/versions/head/test_hitl.py | 8 +- .../airflowctl/api/datamodels/generated.py | 6 +- .../providers/standard/operators/hitl.py | 10 +-- .../providers/standard/triggers/hitl.py | 14 +-- .../unit/standard/operators/test_hitl.py | 12 +-- .../tests/unit/standard/triggers/test_hitl.py | 12 +-- task-sdk/src/airflow/sdk/api/client.py | 6 +- .../airflow/sdk/api/datamodels/_generated.py | 4 +- .../src/airflow/sdk/execution_time/hitl.py | 6 +- task-sdk/tests/task_sdk/api/test_client.py | 14 +-- .../task_sdk/execution_time/test_hitl.py | 16 ++-- 27 files changed, 217 insertions(+), 217 deletions(-) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py index 42878c47c872c..46ddde964d84d 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -29,7 +29,7 @@ class UpdateHITLDetailPayload(BaseModel): """Schema for updating the content of a Human-in-the-loop detail.""" - response_content: list[str] + chosen_options: list[str] params_input: Mapping = Field(default_factory=dict) @@ -38,7 +38,7 @@ class HITLDetailResponse(BaseModel): user_id: str response_at: datetime - response_content: list[str] + chosen_options: list[str] params_input: 
Mapping = Field(default_factory=dict) @@ -58,7 +58,7 @@ class HITLDetail(BaseModel): # Response Content Detail user_id: str | None = None response_at: datetime | None = None - response_content: list[str] | None = None + chosen_options: list[str] | None = None params_input: dict[str, Any] = Field(default_factory=dict) response_received: bool = False diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py index 1c3e259ef867f..6de052b73ee8b 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -76,7 +76,7 @@ def update_hitl_detail( hitl_detail_model.user_id = user.get_id() hitl_detail_model.response_at = timezone.utcnow() - hitl_detail_model.response_content = update_hitl_detail_payload.response_content + hitl_detail_model.chosen_options = update_hitl_detail_payload.chosen_options hitl_detail_model.params_input = update_hitl_detail_payload.params_input session.add(hitl_detail_model) session.commit() diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py index 5d8196a44cf5a..6789ac97dad31 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py @@ -48,7 +48,7 @@ class UpdateHITLDetailPayload(BaseModel): """Schema for writing the resposne part of a Human-in-the-loop detail for a specific task instance.""" ti_id: UUID - response_content: list[str] + chosen_options: list[str] params_input: dict[str, Any] = Field(default_factory=dict) @@ -58,7 +58,7 @@ class HITLDetailResponse(BaseModel): response_received: bool user_id: str | None response_at: datetime | None - response_content: list[str] | None + chosen_options: list[str] | None params_input: dict[str, Any] = 
Field(default_factory=dict) @classmethod @@ -67,6 +67,6 @@ def from_hitl_detail_orm(cls, hitl_detail: HITLDetail) -> HITLDetailResponse: response_received=hitl_detail.response_received, response_at=hitl_detail.response_at, user_id=hitl_detail.user_id, - response_content=hitl_detail.response_content, + chosen_options=hitl_detail.chosen_options, params_input=hitl_detail.params_input or {}, ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py index 50e469c80a9e4..e4bc309c792f5 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -85,7 +85,7 @@ def update_hitl_detail( hitl_detail_model.user_id = "Fallback to default" hitl_detail_model.response_at = datetime.now(timezone.utc) - hitl_detail_model.response_content = payload.response_content + hitl_detail_model.chosen_options = payload.chosen_options hitl_detail_model.params_input = payload.params_input session.add(hitl_detail_model) session.commit() diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index ba355cc57afe7..07966655cfe99 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -474,12 +474,12 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, r elif isinstance(msg, UpdateHITLDetail): api_resp = self.client.hitl.update_response( ti_id=msg.ti_id, - response_content=msg.response_content, + chosen_options=msg.chosen_options, params_input=msg.params_input, ) resp = HITLDetailResponseResult.from_api_response(response=api_resp) elif isinstance(msg, GetHITLDetailResponse): - api_resp = self.client.hitl.get_response_content_detail(ti_id=msg.ti_id) + api_resp = self.client.hitl.get_detail_response(ti_id=msg.ti_id) resp = 
HITLDetailResponseResult.from_api_response(response=api_resp) else: raise ValueError(f"Unknown message type {type(msg)}") diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py index a50451b2a0a89..b41b22c11bc1a 100644 --- a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -61,7 +61,7 @@ def upgrade(): Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), Column("response_at", UtcDateTime, nullable=True), Column("user_id", String(128), nullable=True), - Column("response_content", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("chosen_options", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), ForeignKeyConstraint( ["ti_id"], diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py index 52d4f0022ecd9..e1c51d4b63aa9 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -47,7 +47,7 @@ class HITLDetail(Base): # Response Content Detail response_at = Column(UtcDateTime, nullable=True) user_id = Column(String(128), nullable=True) - response_content = Column( + chosen_options = Column( sqlalchemy_jsonfield.JSONField(json=json), nullable=True, default=None, diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts index 04ca2a2012203..8c33e0cbe20ac 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts @@ -620,16 +620,16 @@ export const UseDagVersionServiceGetDagVersionsKeyFn = ({ bundleName, bundleVers 
orderBy?: string; versionNumber?: number; }, queryKey?: Array) => [useDagVersionServiceGetDagVersionsKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }])]; -export type HumanInTheLoopServiceGetHitlResponseDefaultResponse = Awaited>; -export type HumanInTheLoopServiceGetHitlResponseQueryResult = UseQueryResult; -export const useHumanInTheLoopServiceGetHitlResponseKey = "HumanInTheLoopServiceGetHitlResponse"; -export const UseHumanInTheLoopServiceGetHitlResponseKeyFn = ({ taskInstanceId }: { +export type HumanInTheLoopServiceGetHitlDetailDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetHitlDetailQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetHitlDetailKey = "HumanInTheLoopServiceGetHitlDetail"; +export const UseHumanInTheLoopServiceGetHitlDetailKeyFn = ({ taskInstanceId }: { taskInstanceId: string; -}, queryKey?: Array) => [useHumanInTheLoopServiceGetHitlResponseKey, ...(queryKey ?? [{ taskInstanceId }])]; -export type HumanInTheLoopServiceGetHitlResponsesDefaultResponse = Awaited>; -export type HumanInTheLoopServiceGetHitlResponsesQueryResult = UseQueryResult; -export const useHumanInTheLoopServiceGetHitlResponsesKey = "HumanInTheLoopServiceGetHitlResponses"; -export const UseHumanInTheLoopServiceGetHitlResponsesKeyFn = (queryKey?: Array) => [useHumanInTheLoopServiceGetHitlResponsesKey, ...(queryKey ?? [])]; +}, queryKey?: Array) => [useHumanInTheLoopServiceGetHitlDetailKey, ...(queryKey ?? [{ taskInstanceId }])]; +export type HumanInTheLoopServiceGetHitlDetailsDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetHitlDetailsQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetHitlDetailsKey = "HumanInTheLoopServiceGetHitlDetails"; +export const UseHumanInTheLoopServiceGetHitlDetailsKeyFn = (queryKey?: Array) => [useHumanInTheLoopServiceGetHitlDetailsKey, ...(queryKey ?? 
[])]; export type MonitorServiceGetHealthDefaultResponse = Awaited>; export type MonitorServiceGetHealthQueryResult = UseQueryResult; export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth"; @@ -762,7 +762,7 @@ export type PoolServiceBulkPoolsMutationResult = Awaited>; export type VariableServicePatchVariableMutationResult = Awaited>; export type VariableServiceBulkVariablesMutationResult = Awaited>; -export type HumanInTheLoopServiceUpdateHitlResponseMutationResult = Awaited>; +export type HumanInTheLoopServiceUpdateHitlDetailMutationResult = Awaited>; export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited>; diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts index a5985fffe1ab0..f6c34b4a1c3fe 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts @@ -1172,23 +1172,23 @@ export const ensureUseDagVersionServiceGetDagVersionsData = (queryClient: QueryC versionNumber?: number; }) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); /** -* Get Hitl Response -* Get a Human-in-the-loop Response of a specific task instance. +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. 
* @param data.taskInstanceId -* @returns HITLResponseDetail Successful Response +* @returns HITLDetail Successful Response * @throws ApiError */ -export const ensureUseHumanInTheLoopServiceGetHitlResponseData = (queryClient: QueryClient, { taskInstanceId }: { +export const ensureUseHumanInTheLoopServiceGetHitlDetailData = (queryClient: QueryClient, { taskInstanceId }: { taskInstanceId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) }); +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) }); /** -* Get Hitl Responses -* Get Human-in-the-loop Responses. -* @returns HITLResponseDetailCollection Successful Response +* Get Hitl Details +* Get Human-in-the-loop details. +* @returns HITLDetailCollection Successful Response * @throws ApiError */ -export const ensureUseHumanInTheLoopServiceGetHitlResponsesData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(), queryFn: () => HumanInTheLoopService.getHitlResponses() }); +export const ensureUseHumanInTheLoopServiceGetHitlDetailsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(), queryFn: () => HumanInTheLoopService.getHitlDetails() }); /** * Get Health * @returns HealthInfoResponse Successful Response diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts index e33f4e8a274e3..5993440cc9bcc 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts @@ -1172,23 +1172,23 @@ export const prefetchUseDagVersionServiceGetDagVersions = 
(queryClient: QueryCli versionNumber?: number; }) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); /** -* Get Hitl Response -* Get a Human-in-the-loop Response of a specific task instance. +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. * @param data.taskInstanceId -* @returns HITLResponseDetail Successful Response +* @returns HITLDetail Successful Response * @throws ApiError */ -export const prefetchUseHumanInTheLoopServiceGetHitlResponse = (queryClient: QueryClient, { taskInstanceId }: { +export const prefetchUseHumanInTheLoopServiceGetHitlDetail = (queryClient: QueryClient, { taskInstanceId }: { taskInstanceId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) }); +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) }); /** -* Get Hitl Responses -* Get Human-in-the-loop Responses. -* @returns HITLResponseDetailCollection Successful Response +* Get Hitl Details +* Get Human-in-the-loop details. 
+* @returns HITLDetailCollection Successful Response * @throws ApiError */ -export const prefetchUseHumanInTheLoopServiceGetHitlResponses = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(), queryFn: () => HumanInTheLoopService.getHitlResponses() }); +export const prefetchUseHumanInTheLoopServiceGetHitlDetails = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(), queryFn: () => HumanInTheLoopService.getHitlDetails() }); /** * Get Health * @returns HealthInfoResponse Successful Response diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts index 66c9177457fba..d23a6760a26e9 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts @@ -2,7 +2,7 @@ import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; -import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, 
PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, UpdateHITLResponsePayload, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; +import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, UpdateHITLDetailPayload, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; import * as Common from "./common"; /** * Get Assets @@ -1172,23 +1172,23 @@ export const useDagVersionServiceGetDagVersions = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); /** -* Get Hitl Response -* Get a Human-in-the-loop Response of a specific task instance. +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. 
* @param data.taskInstanceId -* @returns HITLResponseDetail Successful Response +* @returns HITLDetail Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceGetHitlResponse = = unknown[]>({ taskInstanceId }: { +export const useHumanInTheLoopServiceGetHitlDetail = = unknown[]>({ taskInstanceId }: { taskInstanceId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) as TData, ...options }); +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) as TData, ...options }); /** -* Get Hitl Responses -* Get Human-in-the-loop Responses. -* @returns HITLResponseDetailCollection Successful Response +* Get Hitl Details +* Get Human-in-the-loop details. 
+* @returns HITLDetailCollection Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceGetHitlResponses = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlResponses() as TData, ...options }); +export const useHumanInTheLoopServiceGetHitlDetails = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlDetails() as TData, ...options }); /** * Get Health * @returns HealthInfoResponse Successful Response @@ -2007,21 +2007,21 @@ export const useVariableServiceBulkVariables = ({ mutationFn: ({ requestBody }) => VariableService.bulkVariables({ requestBody }) as unknown as Promise, ...options }); /** -* Update Hitl Response -* Update a Human-in-the-loop response. +* Update Hitl Detail +* Update a Human-in-the-loop detail. * @param data The data for the request. * @param data.taskInstanceId * @param data.requestBody -* @returns HITLResponseContentDetail Successful Response +* @returns HITLDetailResponse Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceUpdateHitlResponse = (options?: Omit(options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody, taskInstanceId }) => HumanInTheLoopService.updateHitlResponse({ requestBody, taskInstanceId }) as unknown as Promise, ...options }); +}, TContext>({ mutationFn: ({ requestBody, taskInstanceId }) => HumanInTheLoopService.updateHitlDetail({ requestBody, taskInstanceId }) as unknown as Promise, ...options }); /** * Delete Asset Queued Events * Delete queued asset events for an asset. 
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts index 70c0c0c6f0ffc..4564289a972e8 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts @@ -1172,23 +1172,23 @@ export const useDagVersionServiceGetDagVersionsSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); /** -* Get Hitl Response -* Get a Human-in-the-loop Response of a specific task instance. +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. * @param data.taskInstanceId -* @returns HITLResponseDetail Successful Response +* @returns HITLDetail Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceGetHitlResponseSuspense = = unknown[]>({ taskInstanceId }: { +export const useHumanInTheLoopServiceGetHitlDetailSuspense = = unknown[]>({ taskInstanceId }: { taskInstanceId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponseKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlResponse({ taskInstanceId }) as TData, ...options }); +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) as TData, ...options }); /** -* Get Hitl Responses -* Get Human-in-the-loop Responses. 
-* @returns HITLResponseDetailCollection Successful Response +* Get Hitl Details +* Get Human-in-the-loop details. +* @returns HITLDetailCollection Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceGetHitlResponsesSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlResponsesKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlResponses() as TData, ...options }); +export const useHumanInTheLoopServiceGetHitlDetailsSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlDetails() as TData, ...options }); /** * Get Health * @returns HealthInfoResponse Successful Response diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts index f1f3b550a7093..6611f24e9b9dc 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -3408,37 +3408,7 @@ export const $FastAPIRootMiddlewareResponse = { description: 'Serializer for Plugin FastAPI root middleware responses.' } as const; -export const $HITLResponseContentDetail = { - properties: { - user_id: { - type: 'string', - title: 'User Id' - }, - response_at: { - type: 'string', - format: 'date-time', - title: 'Response At' - }, - response_content: { - items: { - type: 'string' - }, - type: 'array', - title: 'Response Content' - }, - params_input: { - additionalProperties: true, - type: 'object', - title: 'Params Input' - } - }, - type: 'object', - required: ['user_id', 'response_at', 'response_content'], - title: 'HITLResponseContentDetail', - description: 'Response of updating a Human-in-the-loop response.' 
-} as const; - -export const $HITLResponseDetail = { +export const $HITLDetail = { properties: { ti_id: { type: 'string', @@ -3540,18 +3510,18 @@ export const $HITLResponseDetail = { }, type: 'object', required: ['ti_id', 'options', 'subject'], - title: 'HITLResponseDetail', - description: 'Schema for Human-in-the-loop response.' + title: 'HITLDetail', + description: 'Schema for Human-in-the-loop detail.' } as const; -export const $HITLResponseDetailCollection = { +export const $HITLDetailCollection = { properties: { - hitl_responses: { + hitl_details: { items: { - '$ref': '#/components/schemas/HITLResponseDetail' + '$ref': '#/components/schemas/HITLDetail' }, type: 'array', - title: 'Hitl Responses' + title: 'Hitl Details' }, total_entries: { type: 'integer', @@ -3559,9 +3529,39 @@ export const $HITLResponseDetailCollection = { } }, type: 'object', - required: ['hitl_responses', 'total_entries'], - title: 'HITLResponseDetailCollection', - description: 'Schema for a collection of Human-in-the-loop responses.' + required: ['hitl_details', 'total_entries'], + title: 'HITLDetailCollection', + description: 'Schema for a collection of Human-in-the-loop details.' +} as const; + +export const $HITLDetailResponse = { + properties: { + user_id: { + type: 'string', + title: 'User Id' + }, + response_at: { + type: 'string', + format: 'date-time', + title: 'Response At' + }, + response_content: { + items: { + type: 'string' + }, + type: 'array', + title: 'Response Content' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + } + }, + type: 'object', + required: ['user_id', 'response_at', 'response_content'], + title: 'HITLDetailResponse', + description: 'Response of updating a Human-in-the-loop detail.' } as const; export const $HTTPExceptionResponse = { @@ -5852,7 +5852,7 @@ export const $TriggererInfoResponse = { description: 'Triggerer info serializer for responses.' 
} as const; -export const $UpdateHITLResponsePayload = { +export const $UpdateHITLDetailPayload = { properties: { response_content: { items: { @@ -5869,8 +5869,8 @@ export const $UpdateHITLResponsePayload = { }, type: 'object', required: ['response_content'], - title: 'UpdateHITLResponsePayload', - description: 'Schema for updating the content of a Human-in-the-loop response.' + title: 'UpdateHITLDetailPayload', + description: 'Schema for updating the content of a Human-in-the-loop detail.' } as const; export const $ValidationError = { diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts index 2c224d960aa43..3a9443b6def48 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts @@ -3,7 +3,7 @@ import type { CancelablePromise } from './core/CancelablePromise'; import { OpenAPI } from './core/OpenAPI'; import { request as __request } from './core/request'; -import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, 
CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, 
GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlResponseData, UpdateHitlResponseResponse, GetHitlResponseData, GetHitlResponseResponse, 
GetHitlResponsesResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; +import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, 
GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, 
GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlDetailData, UpdateHitlDetailResponse, GetHitlDetailData, GetHitlDetailResponse, GetHitlDetailsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from 
'./types.gen'; export class AssetService { /** @@ -3362,18 +3362,18 @@ export class DagVersionService { export class HumanInTheLoopService { /** - * Update Hitl Response - * Update a Human-in-the-loop response. + * Update Hitl Detail + * Update a Human-in-the-loop detail. * @param data The data for the request. * @param data.taskInstanceId * @param data.requestBody - * @returns HITLResponseContentDetail Successful Response + * @returns HITLDetailResponse Successful Response * @throws ApiError */ - public static updateHitlResponse(data: UpdateHitlResponseData): CancelablePromise { + public static updateHitlDetail(data: UpdateHitlDetailData): CancelablePromise { return __request(OpenAPI, { method: 'PATCH', - url: '/api/v2/hitl-responses/{task_instance_id}', + url: '/api/v2/hitl-details/{task_instance_id}', path: { task_instance_id: data.taskInstanceId }, @@ -3390,17 +3390,17 @@ export class HumanInTheLoopService { } /** - * Get Hitl Response - * Get a Human-in-the-loop Response of a specific task instance. + * Get Hitl Detail + * Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. * @param data.taskInstanceId - * @returns HITLResponseDetail Successful Response + * @returns HITLDetail Successful Response * @throws ApiError */ - public static getHitlResponse(data: GetHitlResponseData): CancelablePromise { + public static getHitlDetail(data: GetHitlDetailData): CancelablePromise { return __request(OpenAPI, { method: 'GET', - url: '/api/v2/hitl-responses/{task_instance_id}', + url: '/api/v2/hitl-details/{task_instance_id}', path: { task_instance_id: data.taskInstanceId }, @@ -3414,15 +3414,15 @@ export class HumanInTheLoopService { } /** - * Get Hitl Responses - * Get Human-in-the-loop Responses. - * @returns HITLResponseDetailCollection Successful Response + * Get Hitl Details + * Get Human-in-the-loop details. 
+ * @returns HITLDetailCollection Successful Response * @throws ApiError */ - public static getHitlResponses(): CancelablePromise { + public static getHitlDetails(): CancelablePromise { return __request(OpenAPI, { method: 'GET', - url: '/api/v2/hitl-responses/', + url: '/api/v2/hitl-details/', errors: { 401: 'Unauthorized', 403: 'Forbidden' diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index 7f11cbee8e887..e45ab6ea185f8 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -918,21 +918,9 @@ export type FastAPIRootMiddlewareResponse = { }; /** - * Response of updating a Human-in-the-loop response. + * Schema for Human-in-the-loop detail. */ -export type HITLResponseContentDetail = { - user_id: string; - response_at: string; - response_content: Array<(string)>; - params_input?: { - [key: string]: unknown; - }; -}; - -/** - * Schema for Human-in-the-loop response. - */ -export type HITLResponseDetail = { +export type HITLDetail = { ti_id: string; options: Array<(string)>; subject: string; @@ -952,13 +940,25 @@ export type HITLResponseDetail = { }; /** - * Schema for a collection of Human-in-the-loop responses. + * Schema for a collection of Human-in-the-loop details. */ -export type HITLResponseDetailCollection = { - hitl_responses: Array; +export type HITLDetailCollection = { + hitl_details: Array; total_entries: number; }; +/** + * Response of updating a Human-in-the-loop detail. + */ +export type HITLDetailResponse = { + user_id: string; + response_at: string; + response_content: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; +}; + /** * HTTPException Model used for error response. */ @@ -1472,9 +1472,9 @@ export type TriggererInfoResponse = { }; /** - * Schema for updating the content of a Human-in-the-loop response. 
+ * Schema for updating the content of a Human-in-the-loop detail. */ -export type UpdateHITLResponsePayload = { +export type UpdateHITLDetailPayload = { response_content: Array<(string)>; params_input?: { [key: string]: unknown; @@ -2899,20 +2899,20 @@ export type GetDagVersionsData = { export type GetDagVersionsResponse = DAGVersionCollectionResponse; -export type UpdateHitlResponseData = { - requestBody: UpdateHITLResponsePayload; +export type UpdateHitlDetailData = { + requestBody: UpdateHITLDetailPayload; taskInstanceId: string; }; -export type UpdateHitlResponseResponse = HITLResponseContentDetail; +export type UpdateHitlDetailResponse = HITLDetailResponse; -export type GetHitlResponseData = { +export type GetHitlDetailData = { taskInstanceId: string; }; -export type GetHitlResponseResponse = HITLResponseDetail; +export type GetHitlDetailResponse = HITLDetail; -export type GetHitlResponsesResponse = HITLResponseDetailCollection; +export type GetHitlDetailsResponse = HITLDetailCollection; export type GetHealthResponse = HealthInfoResponse; @@ -5860,14 +5860,14 @@ export type $OpenApiTs = { }; }; }; - '/api/v2/hitl-responses/{task_instance_id}': { + '/api/v2/hitl-details/{task_instance_id}': { patch: { - req: UpdateHitlResponseData; + req: UpdateHitlDetailData; res: { /** * Successful Response */ - 200: HITLResponseContentDetail; + 200: HITLDetailResponse; /** * Unauthorized */ @@ -5891,12 +5891,12 @@ export type $OpenApiTs = { }; }; get: { - req: GetHitlResponseData; + req: GetHitlDetailData; res: { /** * Successful Response */ - 200: HITLResponseDetail; + 200: HITLDetail; /** * Unauthorized */ @@ -5916,13 +5916,13 @@ export type $OpenApiTs = { }; }; }; - '/api/v2/hitl-responses/': { + '/api/v2/hitl-details/': { get: { res: { /** * Successful Response */ - 200: HITLResponseDetailCollection; + 200: HITLDetailCollection; /** * Unauthorized */ diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py 
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py index 8f6bc42aa7756..d0b02ba03def5 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -71,7 +71,7 @@ def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: "params": {"input_1": 1}, "params_input": {}, "response_at": None, - "response_content": None, + "chosen_options": None, "response_received": False, "subject": "This is subject", "ti_id": sample_ti.id, @@ -85,13 +85,13 @@ class TestUpdateHITLDetailEndpoint: def test_should_respond_200_with_existing_response(self, test_client, sample_ti): response = test_client.patch( f"/hitl-details/{sample_ti.id}", - json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, + json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) assert response.status_code == 200 assert response.json() == { "params_input": {"input_1": 2}, - "response_content": ["Approve"], + "chosen_options": ["Approve"], "user_id": "test", "response_at": "2025-07-03T00:00:00Z", } @@ -111,12 +111,12 @@ def test_should_respond_404(self, test_client, sample_ti): def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_detail_dict): response = test_client.patch( f"/hitl-details/{sample_ti.id}", - json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, + json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) expected_response = { "params_input": {"input_1": 2}, - "response_content": ["Approve"], + "chosen_options": ["Approve"], "user_id": "test", "response_at": "2025-07-03T00:00:00Z", } @@ -125,7 +125,7 @@ def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_d response = test_client.patch( f"/hitl-details/{sample_ti.id}", - json={"response_content": ["Approve"], "params_input": {"input_1": 2}}, + json={"chosen_options": ["Approve"], "params_input": 
{"input_1": 2}}, ) assert response.status_code == 409 assert response.json() == { diff --git a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py index 3266faa98ba60..ff9da89fbe5fe 100644 --- a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py @@ -70,7 +70,7 @@ def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: "params": {"input_1": 1}, "params_input": {}, "response_at": None, - "response_content": None, + "chosen_options": None, "response_received": False, "subject": "This is subject", "ti_id": sample_ti.id, @@ -113,7 +113,7 @@ def test_update_hitl_detail(client, sample_ti) -> None: f"/execution/hitl-details/{sample_ti.id}", json={ "ti_id": sample_ti.id, - "response_content": ["Reject"], + "chosen_options": ["Reject"], "params_input": {"input_1": 2}, }, ) @@ -121,7 +121,7 @@ def test_update_hitl_detail(client, sample_ti) -> None: assert response.json() == { "params_input": {"input_1": 2}, "response_at": "2025-07-03T00:00:00Z", - "response_content": ["Reject"], + "chosen_options": ["Reject"], "response_received": True, "user_id": "Fallback to default", } @@ -134,7 +134,7 @@ def test_get_hitl_detail(client, sample_ti) -> None: assert response.json() == { "params_input": {}, "response_at": None, - "response_content": None, + "chosen_options": None, "response_received": False, "user_id": None, } diff --git a/airflow-ctl/src/airflowctl/api/datamodels/generated.py b/airflow-ctl/src/airflowctl/api/datamodels/generated.py index 9dd55fac264e4..71bebe428ceb4 100644 --- a/airflow-ctl/src/airflowctl/api/datamodels/generated.py +++ b/airflow-ctl/src/airflowctl/api/datamodels/generated.py @@ -587,7 +587,7 @@ class HITLDetail(BaseModel): params: Annotated[dict[str, Any] | None, Field(title="Params")] = None user_id: Annotated[str | None, 
Field(title="User Id")] = None response_at: Annotated[datetime | None, Field(title="Response At")] = None - response_content: Annotated[list[str] | None, Field(title="Response Content")] = None + chosen_options: Annotated[list[str] | None, Field(title="Response Content")] = None params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None response_received: Annotated[bool | None, Field(title="Response Received")] = False @@ -608,7 +608,7 @@ class HITLDetailResponse(BaseModel): user_id: Annotated[str, Field(title="User Id")] response_at: Annotated[datetime, Field(title="Response At")] - response_content: Annotated[list[str], Field(title="Response Content")] + chosen_options: Annotated[list[str], Field(title="Response Content")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None @@ -943,7 +943,7 @@ class UpdateHITLDetailPayload(BaseModel): Schema for updating the content of a Human-in-the-loop detail. """ - response_content: Annotated[list[str], Field(title="Response Content")] + chosen_options: Annotated[list[str], Field(title="Response Content")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 11dda9e2d3982..6a110b0324c2b 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -135,18 +135,18 @@ def execute_complete(self, context: Context, event: dict[str, Any]) -> Any: if "error" in event: raise HITLTriggerEventError(event["error"]) - response_content = event["response_content"] + chosen_options = event["chosen_options"] params_input = event["params_input"] or {} - self.validate_response_content(response_content) + self.validate_chosen_options(chosen_options) self.validate_params_input(params_input) return 
HITLTriggerEventSuccessPayload( - response_content=response_content, + chosen_options=chosen_options, params_input=params_input, ) - def validate_response_content(self, response_content: list[str]) -> None: + def validate_chosen_options(self, chosen_options: list[str]) -> None: """Check whether user provide valid response.""" - if diff := set(response_content) - set(self.options): + if diff := set(chosen_options) - set(self.options): raise ValueError(f"Responses {diff} not in {self.options}") def validate_params_input(self, params_input: Mapping) -> None: diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py index 23df73355e98c..574eb5b84abf5 100644 --- a/providers/standard/src/airflow/providers/standard/triggers/hitl.py +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -32,7 +32,7 @@ from airflow.sdk.execution_time.hitl import ( get_hitl_detail_content_detail, - update_htil_response_content_detail, + update_htil_detail_response, ) from airflow.triggers.base import BaseTrigger, TriggerEvent from airflow.utils import timezone @@ -41,7 +41,7 @@ class HITLTriggerEventSuccessPayload(TypedDict, total=False): """Minimum required keys for a success Human-in-the-loop TriggerEvent.""" - response_content: list[str] + chosen_options: list[str] params_input: dict[str, Any] @@ -105,25 +105,25 @@ async def run(self) -> AsyncIterator[TriggerEvent]: ) return - await sync_to_async(update_htil_response_content_detail)( + await sync_to_async(update_htil_detail_response)( ti_id=self.ti_id, - response_content=self.default, + chosen_options=self.default, params_input=self.params, ) yield TriggerEvent( HITLTriggerEventSuccessPayload( - response_content=self.default, + chosen_options=self.default, params_input=self.params, ) ) return resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id) - if resp.response_received and resp.response_content: + if 
resp.response_received and resp.chosen_options: self.log.info("Responded by %s at %s", resp.user_id, resp.response_at) yield TriggerEvent( HITLTriggerEventSuccessPayload( - response_content=resp.response_content, + chosen_options=resp.chosen_options, params_input=resp.params_input, ) ) diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index f146eb7bf71cd..d8c31b39d2b79 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -97,7 +97,7 @@ def test_execute(self, dag_maker, session) -> None: assert hitl_detail_model.params == {"input_1": 1} assert hitl_detail_model.response_at is None assert hitl_detail_model.user_id is None - assert hitl_detail_model.response_content is None + assert hitl_detail_model.chosen_options is None assert hitl_detail_model.params_input == {} registered_trigger = session.scalar( @@ -142,13 +142,13 @@ def test_execute_complete(self) -> None: ret = hitl_op.execute_complete( context={}, - event={"response_content": ["1"], "params_input": {"input": 2}}, + event={"chosen_options": ["1"], "params_input": {"input": 2}}, ) - assert ret["response_content"] == ["1"] + assert ret["chosen_options"] == ["1"] assert ret["params_input"] == {"input": 2} - def test_validate_response_content_with_invalid_content(self) -> None: + def test_validate_chosen_options_with_invalid_content(self) -> None: hitl_op = HITLOperator( task_id="hitl_test", subject="This is subject", @@ -161,7 +161,7 @@ def test_validate_response_content_with_invalid_content(self) -> None: hitl_op.execute_complete( context={}, event={ - "response_content": ["not exists"], + "chosen_options": ["not exists"], "params_input": {"input": 2}, }, ) @@ -179,7 +179,7 @@ def test_validate_params_input_with_invalid_input(self) -> None: hitl_op.execute_complete( context={}, event={ - "response_content": ["1"], + 
"chosen_options": ["1"], "params_input": {"no such key": 2, "input": 333}, }, ) diff --git a/providers/standard/tests/unit/standard/triggers/test_hitl.py b/providers/standard/tests/unit/standard/triggers/test_hitl.py index 009e90d163f72..29a4cc14e0fec 100644 --- a/providers/standard/tests/unit/standard/triggers/test_hitl.py +++ b/providers/standard/tests/unit/standard/triggers/test_hitl.py @@ -63,7 +63,7 @@ def test_serialization(self): @pytest.mark.db_test @pytest.mark.asyncio - @mock.patch("airflow.sdk.execution_time.hitl.update_htil_response_content_detail") + @mock.patch("airflow.sdk.execution_time.hitl.update_htil_detail_response") async def test_run_fallback_to_default_due_to_timeout(self, mock_update, mock_supervisor_comms): trigger = HITLTrigger( ti_id=TI_ID, @@ -78,7 +78,7 @@ async def test_run_fallback_to_default_due_to_timeout(self, mock_update, mock_su response_received=False, user_id=None, response_at=None, - response_content=None, + chosen_options=None, params_input={}, ) @@ -88,14 +88,14 @@ async def test_run_fallback_to_default_due_to_timeout(self, mock_update, mock_su event = await trigger_task assert event == TriggerEvent( HITLTriggerEventSuccessPayload( - response_content=["1"], + chosen_options=["1"], params_input={"input": 1}, ) ) @pytest.mark.db_test @pytest.mark.asyncio - @mock.patch("airflow.sdk.execution_time.hitl.update_htil_response_content_detail") + @mock.patch("airflow.sdk.execution_time.hitl.update_htil_detail_response") async def test_run(self, mock_update, mock_supervisor_comms): trigger = HITLTrigger( ti_id=TI_ID, @@ -110,7 +110,7 @@ async def test_run(self, mock_update, mock_supervisor_comms): response_received=True, user_id="test", response_at=utcnow(), - response_content=["3"], + chosen_options=["3"], params_input={"input": 50}, ) @@ -120,7 +120,7 @@ async def test_run(self, mock_update, mock_supervisor_comms): event = await trigger_task assert event == TriggerEvent( HITLTriggerEventSuccessPayload( - response_content=["3"], + 
chosen_options=["3"], params_input={"input": 50}, ) ) diff --git a/task-sdk/src/airflow/sdk/api/client.py b/task-sdk/src/airflow/sdk/api/client.py index 70ce783853d60..b5663ca04854c 100644 --- a/task-sdk/src/airflow/sdk/api/client.py +++ b/task-sdk/src/airflow/sdk/api/client.py @@ -665,13 +665,13 @@ def update_response( self, *, ti_id: uuid.UUID, - response_content: list[str], + chosen_options: list[str], params_input: dict[str, Any], ) -> HITLDetailResponse: """Update an existing Human-in-the-loop response.""" payload = UpdateHITLDetail( ti_id=ti_id, - response_content=response_content, + chosen_options=chosen_options, params_input=params_input, ) resp = self.client.patch( @@ -680,7 +680,7 @@ def update_response( ) return HITLDetailResponse.model_validate_json(resp.read()) - def get_response_content_detail(self, ti_id: uuid.UUID) -> HITLDetailResponse: + def get_detail_response(self, ti_id: uuid.UUID) -> HITLDetailResponse: """Get content part of a Human-in-the-loop response for a specific Task Instance.""" resp = self.client.get(f"/hitl-details/{ti_id}") return HITLDetailResponse.model_validate_json(resp.read()) diff --git a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py index 541bb1397140b..15641090bbd2e 100644 --- a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py +++ b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py @@ -193,7 +193,7 @@ class HITLDetailResponse(BaseModel): response_received: Annotated[bool, Field(title="Response Received")] user_id: Annotated[str | None, Field(title="User Id")] = None response_at: Annotated[AwareDatetime | None, Field(title="Response At")] = None - response_content: Annotated[list[str] | None, Field(title="Response Content")] = None + chosen_options: Annotated[list[str] | None, Field(title="Response Content")] = None params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None @@ -374,7 +374,7 @@ class UpdateHITLDetail(BaseModel): 
""" ti_id: Annotated[UUID, Field(title="Ti Id")] - response_content: Annotated[list[str], Field(title="Response Content")] + chosen_options: Annotated[list[str], Field(title="Response Content")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None type: Annotated[Literal["UpdateHITLDetail"] | None, Field(title="Type")] = "UpdateHITLDetail" diff --git a/task-sdk/src/airflow/sdk/execution_time/hitl.py b/task-sdk/src/airflow/sdk/execution_time/hitl.py index f15ffb218320b..235bc92336308 100644 --- a/task-sdk/src/airflow/sdk/execution_time/hitl.py +++ b/task-sdk/src/airflow/sdk/execution_time/hitl.py @@ -54,9 +54,9 @@ def add_hitl_detail( ) -def update_htil_response_content_detail( +def update_htil_detail_response( ti_id: UUID, - response_content: list[str], + chosen_options: list[str], params_input: dict[str, Any], ) -> HITLDetailResponse: from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS @@ -64,7 +64,7 @@ def update_htil_response_content_detail( response = SUPERVISOR_COMMS.send( msg=UpdateHITLDetail( ti_id=ti_id, - response_content=response_content, + chosen_options=chosen_options, params_input=params_input, ), ) diff --git a/task-sdk/tests/task_sdk/api/test_client.py b/task-sdk/tests/task_sdk/api/test_client.py index ff65a85415770..93bcfb6f548d3 100644 --- a/task-sdk/tests/task_sdk/api/test_client.py +++ b/task-sdk/tests/task_sdk/api/test_client.py @@ -1208,7 +1208,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: return httpx.Response( status_code=200, json={ - "response_content": ["Approval"], + "chosen_options": ["Approval"], "params_input": {}, "user_id": "admin", "response_received": True, @@ -1220,17 +1220,17 @@ def handle_request(request: httpx.Request) -> httpx.Response: client = make_client(transport=httpx.MockTransport(handle_request)) result = client.hitl.update_response( ti_id=ti_id, - response_content=["Approve"], + chosen_options=["Approve"], params_input={}, ) assert isinstance(result, 
HITLDetailResponse) assert result.response_received is True - assert result.response_content == ["Approval"] + assert result.chosen_options == ["Approval"] assert result.params_input == {} assert result.user_id == "admin" assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) - def test_get_response_content_detail(self, time_machine: TimeMachineFixture) -> None: + def test_get_detail_response(self, time_machine: TimeMachineFixture) -> None: time_machine.move_to(datetime(2025, 7, 3, 0, 0, 0)) ti_id = uuid7() @@ -1239,7 +1239,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: return httpx.Response( status_code=200, json={ - "response_content": ["Approval"], + "chosen_options": ["Approval"], "params_input": {}, "user_id": "admin", "response_received": True, @@ -1249,10 +1249,10 @@ def handle_request(request: httpx.Request) -> httpx.Response: return httpx.Response(status_code=400, json={"detail": "Bad Request"}) client = make_client(transport=httpx.MockTransport(handle_request)) - result = client.hitl.get_response_content_detail(ti_id=ti_id) + result = client.hitl.get_detail_response(ti_id=ti_id) assert isinstance(result, HITLDetailResponse) assert result.response_received is True - assert result.response_content == ["Approval"] + assert result.chosen_options == ["Approval"] assert result.params_input == {} assert result.user_id == "admin" assert result.response_at == timezone.datetime(2025, 7, 3, 0, 0, 0) diff --git a/task-sdk/tests/task_sdk/execution_time/test_hitl.py b/task-sdk/tests/task_sdk/execution_time/test_hitl.py index e4abf9004254b..7f717dfa85c40 100644 --- a/task-sdk/tests/task_sdk/execution_time/test_hitl.py +++ b/task-sdk/tests/task_sdk/execution_time/test_hitl.py @@ -24,7 +24,7 @@ from airflow.sdk.execution_time.hitl import ( add_hitl_detail, get_hitl_detail_content_detail, - update_htil_response_content_detail, + update_htil_detail_response, ) from airflow.utils import timezone @@ -54,23 +54,23 @@ def 
test_add_hitl_detail(mock_supervisor_comms) -> None: ) -def test_update_htil_response_content_detail(mock_supervisor_comms) -> None: +def test_update_htil_detail_response(mock_supervisor_comms) -> None: timestamp = timezone.utcnow() mock_supervisor_comms.send.return_value = HITLDetailResponse( response_received=True, - response_content=["Approve"], + chosen_options=["Approve"], response_at=timestamp, user_id="admin", params_input={"input_1": 1}, ) - resp = update_htil_response_content_detail( + resp = update_htil_detail_response( ti_id=TI_ID, - response_content=["Approve"], + chosen_options=["Approve"], params_input={"input_1": 1}, ) assert resp == HITLDetailResponse( response_received=True, - response_content=["Approve"], + chosen_options=["Approve"], response_at=timestamp, user_id="admin", params_input={"input_1": 1}, @@ -80,7 +80,7 @@ def test_update_htil_response_content_detail(mock_supervisor_comms) -> None: def test_get_hitl_detail_content_detail(mock_supervisor_comms) -> None: mock_supervisor_comms.send.return_value = HITLDetailResponse( response_received=False, - response_content=None, + chosen_options=None, response_at=None, user_id=None, params_input={}, @@ -88,7 +88,7 @@ def test_get_hitl_detail_content_detail(mock_supervisor_comms) -> None: resp = get_hitl_detail_content_detail(TI_ID) assert resp == HITLDetailResponse( response_received=False, - response_content=None, + chosen_options=None, response_at=None, user_id=None, params_input={}, From 63978dbb9d3c22309f650770659ce0f9f1adb71a Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 18:14:09 +0800 Subject: [PATCH 25/30] feat(hitl): rename default as defaults --- airflow-core/docs/img/airflow_erd.sha256 | 2 +- airflow-core/docs/img/airflow_erd.svg | 46 +++++++++---------- .../api_fastapi/core_api/datamodels/hitl.py | 2 +- .../openapi/v2-rest-api-generated.yaml | 20 ++++---- .../execution_api/datamodels/hitl.py | 4 +- .../api_fastapi/execution_api/routes/hitl.py | 4 +- 
...77_3_1_0_add_human_in_the_loop_response.py | 2 +- airflow-core/src/airflow/models/hitl.py | 2 +- .../core_api/routes/public/test_hitl.py | 4 +- .../execution_api/versions/head/test_hitl.py | 10 ++-- .../airflowctl/api/datamodels/generated.py | 8 ++-- .../providers/standard/operators/hitl.py | 28 +++++------ .../providers/standard/triggers/hitl.py | 14 +++--- .../unit/standard/operators/test_hitl.py | 22 +++++---- .../tests/unit/standard/triggers/test_hitl.py | 8 ++-- task-sdk/src/airflow/sdk/api/client.py | 4 +- .../airflow/sdk/api/datamodels/_generated.py | 8 ++-- .../src/airflow/sdk/execution_time/hitl.py | 4 +- .../airflow/sdk/execution_time/supervisor.py | 2 +- task-sdk/tests/task_sdk/api/test_client.py | 6 +-- .../execution_time/test_supervisor.py | 8 ++-- 21 files changed, 106 insertions(+), 102 deletions(-) diff --git a/airflow-core/docs/img/airflow_erd.sha256 b/airflow-core/docs/img/airflow_erd.sha256 index 071d8789fb485..e935d2a08ab18 100644 --- a/airflow-core/docs/img/airflow_erd.sha256 +++ b/airflow-core/docs/img/airflow_erd.sha256 @@ -1 +1 @@ -16689ca84560a2611b67137f8fd0e44b703c2ceaca1b1db1c1fd1c9afd03e85a \ No newline at end of file +2e49ab99fe1076b0f3f22a52b9ee37eeb7fc20a5a043ea504cc26022f4315277 \ No newline at end of file diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg index f960dfb5fc2fd..2f9f9b4becc5e 100644 --- a/airflow-core/docs/img/airflow_erd.svg +++ b/airflow-core/docs/img/airflow_erd.svg @@ -1336,36 +1336,36 @@ [TEXT] -default - - [JSON] +chosen_options + + [JSON] -multiple +defaults - [BOOLEAN] + [JSON] -options - - [JSON] - NOT NULL +multiple + + [BOOLEAN] -params - - [JSON] - NOT NULL +options + + [JSON] + NOT NULL -params_input - - [JSON] - NOT NULL +params + + [JSON] + NOT NULL -response_at - - [TIMESTAMP] +params_input + + [JSON] + NOT NULL -response_content - - [JSON] +response_at + + [TIMESTAMP] subject diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py 
b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py index 46ddde964d84d..88ad702316423 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -51,7 +51,7 @@ class HITLDetail(BaseModel): options: list[str] subject: str body: str | None = None - default: list[str] | None = None + defaults: list[str] | None = None multiple: bool = False params: dict[str, Any] = Field(default_factory=dict) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index f41b2ae11f375..8b2969a91bf7f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -9744,13 +9744,13 @@ components: - type: string - type: 'null' title: Body - default: + defaults: anyOf: - items: type: string type: array - type: 'null' - title: Default + title: Defaults multiple: type: boolean title: Multiple @@ -9770,13 +9770,13 @@ components: format: date-time - type: 'null' title: Response At - response_content: + chosen_options: anyOf: - items: type: string type: array - type: 'null' - title: Response Content + title: Chosen Options params_input: additionalProperties: true type: object @@ -9817,11 +9817,11 @@ components: type: string format: date-time title: Response At - response_content: + chosen_options: items: type: string type: array - title: Response Content + title: Chosen Options params_input: additionalProperties: true type: object @@ -9830,7 +9830,7 @@ components: required: - user_id - response_at - - response_content + - chosen_options title: HITLDetailResponse description: Response of updating a Human-in-the-loop detail. HTTPExceptionResponse: @@ -11364,18 +11364,18 @@ components: description: Triggerer info serializer for responses. 
UpdateHITLDetailPayload: properties: - response_content: + chosen_options: items: type: string type: array - title: Response Content + title: Chosen Options params_input: additionalProperties: true type: object title: Params Input type: object required: - - response_content + - chosen_options title: UpdateHITLDetailPayload description: Schema for updating the content of a Human-in-the-loop detail. ValidationError: diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py index 6789ac97dad31..c75ca8c14f2ee 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py @@ -33,7 +33,7 @@ class HITLDetailRequest(BaseModel): options: list[str] subject: str body: str | None = None - default: list[str] | None = None + defaults: list[str] | None = None multiple: bool = False params: dict[str, Any] = Field(default_factory=dict) @@ -45,7 +45,7 @@ class GetHITLDetailResponsePayload(BaseModel): class UpdateHITLDetailPayload(BaseModel): - """Schema for writing the resposne part of a Human-in-the-loop detail for a specific task instance.""" + """Schema for writing the response part of a Human-in-the-loop detail for a specific task instance.""" ti_id: UUID chosen_options: list[str] diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py index e4bc309c792f5..a82e496a8a7a2 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -59,7 +59,7 @@ def add_hitl_detail( options=payload.options, subject=payload.subject, body=payload.body, - default=payload.default, + defaults=payload.defaults, multiple=payload.multiple, params=payload.params, ) @@ -83,7 +83,7 @@ def update_hitl_detail( f"Human-in-the-loop detail 
for Task Instance with id {ti_id_str} already exists.", ) - hitl_detail_model.user_id = "Fallback to default" + hitl_detail_model.user_id = "Fallback to defaults" hitl_detail_model.response_at = datetime.now(timezone.utc) hitl_detail_model.chosen_options = payload.chosen_options hitl_detail_model.params_input = payload.params_input diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py index b41b22c11bc1a..61f950f5d120e 100644 --- a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -56,7 +56,7 @@ def upgrade(): Column("options", sqlalchemy_jsonfield.JSONField(json=json), nullable=False), Column("subject", Text, nullable=False), Column("body", Text, nullable=True), - Column("default", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("defaults", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), Column("multiple", Boolean, unique=False, default=False), Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), Column("response_at", UtcDateTime, nullable=True), diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py index e1c51d4b63aa9..9d060ba1c19d7 100644 --- a/airflow-core/src/airflow/models/hitl.py +++ b/airflow-core/src/airflow/models/hitl.py @@ -40,7 +40,7 @@ class HITLDetail(Base): options = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False) subject = Column(Text, nullable=False) body = Column(Text, nullable=True) - default = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + defaults = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) multiple = Column(Boolean, unique=False, default=False) params = Column(sqlalchemy_jsonfield.JSONField(json=json), 
nullable=False, default={}) diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py index d0b02ba03def5..7bbe7153e8257 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -51,7 +51,7 @@ def sample_hitl_detail(session, sample_ti) -> HITLDetail: options=["Approve", "Reject"], subject="This is subject", body="this is body", - default=["Approve"], + defaults=["Approve"], multiple=False, params={"input_1": 1}, ) @@ -65,7 +65,7 @@ def sample_hitl_detail(session, sample_ti) -> HITLDetail: def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: return { "body": "this is body", - "default": ["Approve"], + "defaults": ["Approve"], "multiple": False, "options": ["Approve", "Reject"], "params": {"input_1": 1}, diff --git a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py index ff9da89fbe5fe..3324730477082 100644 --- a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py @@ -50,7 +50,7 @@ def sample_hitl_detail(session, sample_ti) -> HITLDetail: options=["Approve", "Reject"], subject="This is subject", body="this is body", - default=["Approve"], + defaults=["Approve"], multiple=False, params={"input_1": 1}, ) @@ -64,7 +64,7 @@ def sample_hitl_detail(session, sample_ti) -> HITLDetail: def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: return { "body": "this is body", - "default": ["Approve"], + "defaults": ["Approve"], "multiple": False, "options": ["Approve", "Reject"], "params": {"input_1": 1}, @@ -89,7 +89,7 @@ def test_add_hitl_detail(client, create_task_instance, session) -> None: "options": ["Approve", "Reject"], "subject": 
"This is subject", "body": "this is body", - "default": ["Approve"], + "defaults": ["Approve"], "multiple": False, "params": {"input_1": 1}, }, @@ -100,7 +100,7 @@ def test_add_hitl_detail(client, create_task_instance, session) -> None: "options": ["Approve", "Reject"], "subject": "This is subject", "body": "this is body", - "default": ["Approve"], + "defaults": ["Approve"], "multiple": False, "params": {"input_1": 1}, } @@ -123,7 +123,7 @@ def test_update_hitl_detail(client, sample_ti) -> None: "response_at": "2025-07-03T00:00:00Z", "chosen_options": ["Reject"], "response_received": True, - "user_id": "Fallback to default", + "user_id": "Fallback to defaults", } diff --git a/airflow-ctl/src/airflowctl/api/datamodels/generated.py b/airflow-ctl/src/airflowctl/api/datamodels/generated.py index 71bebe428ceb4..0824759850a68 100644 --- a/airflow-ctl/src/airflowctl/api/datamodels/generated.py +++ b/airflow-ctl/src/airflowctl/api/datamodels/generated.py @@ -582,12 +582,12 @@ class HITLDetail(BaseModel): options: Annotated[list[str], Field(title="Options")] subject: Annotated[str, Field(title="Subject")] body: Annotated[str | None, Field(title="Body")] = None - default: Annotated[list[str] | None, Field(title="Default")] = None + defaults: Annotated[list[str] | None, Field(title="Defaults")] = None multiple: Annotated[bool | None, Field(title="Multiple")] = False params: Annotated[dict[str, Any] | None, Field(title="Params")] = None user_id: Annotated[str | None, Field(title="User Id")] = None response_at: Annotated[datetime | None, Field(title="Response At")] = None - chosen_options: Annotated[list[str] | None, Field(title="Response Content")] = None + chosen_options: Annotated[list[str] | None, Field(title="Chosen Options")] = None params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None response_received: Annotated[bool | None, Field(title="Response Received")] = False @@ -608,7 +608,7 @@ class HITLDetailResponse(BaseModel): user_id: 
Annotated[str, Field(title="User Id")] response_at: Annotated[datetime, Field(title="Response At")] - chosen_options: Annotated[list[str], Field(title="Response Content")] + chosen_options: Annotated[list[str], Field(title="Chosen Options")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None @@ -943,7 +943,7 @@ class UpdateHITLDetailPayload(BaseModel): Schema for updating the content of a Human-in-the-loop detail. """ - chosen_options: Annotated[list[str], Field(title="Response Content")] + chosen_options: Annotated[list[str], Field(title="Chosen Options")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 6a110b0324c2b..641e56542b924 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -45,7 +45,7 @@ class HITLOperator(BaseOperator): :param subject: Headline/subject presented to the user for the interaction task. :param options: List of options that the an user can select from to complete the task. :param body: Descriptive text (with Markdown support) that gives the details that are needed to decide. - :param default: The default option and the option that is taken if timeout is passed. + :param defaults: The default options and the options that are taken if timeout is passed. :param multiple: Whether the user can select one or multiple options. :param params: dictionary of parameter definitions that are in the format of Dag params such that a Form Field can be rendered. 
Entered data is validated (schema, required fields) like for a Dag run @@ -60,7 +60,7 @@ def __init__( subject: str, options: list[str], body: str | None = None, - default: str | list[str] | None = None, + defaults: str | list[str] | None = None, multiple: bool = False, params: ParamsDict | dict[str, Any] | None = None, **kwargs, @@ -71,7 +71,7 @@ def __init__( self.options = options # allow defaults to store more than one options when multiple=True - self.default = [default] if isinstance(default, str) else default + self.defaults = [defaults] if isinstance(defaults, str) else defaults self.multiple = multiple self.params: ParamsDict = params if isinstance(params, ParamsDict) else ParamsDict(params or {}) @@ -80,21 +80,21 @@ def __init__( def validate_defaults(self) -> None: """ - Validate whether the given default pass the following criteria. + Validate whether the given defaults pass the following criteria. 1. When timeout is set, default options should be provided. 2. Default options should be the subset of options. 3. When multiple is False, there should only be one option. 
""" - if self.default is None and self.execution_timeout: - raise ValueError('"default" is required when "execution_timeout" is provided.') + if self.defaults is None and self.execution_timeout: + raise ValueError('"defaults" is required when "execution_timeout" is provided.') - if self.default is not None: - if not set(self.default).issubset(self.options): - raise ValueError(f'default "{self.default}" should be a subset of options "{self.options}"') + if self.defaults is not None: + if not set(self.defaults).issubset(self.options): + raise ValueError(f'defaults "{self.defaults}" should be a subset of options "{self.options}"') - if self.multiple is False and len(self.default) > 1: - raise ValueError('More than one default given when "multiple" is set to False.') + if self.multiple is False and len(self.defaults) > 1: + raise ValueError('More than one defaults given when "multiple" is set to False.') def execute(self, context: Context): """Add a Human-in-the-loop Response and then defer to HITLTrigger and wait for user input.""" @@ -105,7 +105,7 @@ def execute(self, context: Context): options=self.options, subject=self.subject, body=self.body, - default=self.default, + defaults=self.defaults, multiple=self.multiple, params=self.serialzed_params, ) @@ -119,7 +119,7 @@ def execute(self, context: Context): trigger=HITLTrigger( ti_id=ti_id, options=self.options, - default=self.default, + defaults=self.defaults, params=self.serialzed_params, multiple=self.multiple, timeout_datetime=timeout_datetime, @@ -200,6 +200,6 @@ class HITLEntryOperator(HITLOperator): def __init__(self, **kwargs) -> None: if "options" not in kwargs: kwargs["options"] = ["OK"] - kwargs["default"] = ["OK"] + kwargs["defaults"] = ["OK"] super().__init__(**kwargs) diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py index 574eb5b84abf5..0e9690346c5c7 100644 --- 
a/providers/standard/src/airflow/providers/standard/triggers/hitl.py +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -60,7 +60,7 @@ def __init__( ti_id: UUID, options: list[str], params: dict[str, Any], - default: list[str] | None = None, + defaults: list[str] | None = None, multiple: bool = False, timeout_datetime: datetime | None, poke_interval: float = 5.0, @@ -72,7 +72,7 @@ def __init__( self.options = options self.multiple = multiple - self.default = default + self.defaults = defaults self.timeout_datetime = timeout_datetime self.params = params @@ -84,7 +84,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: { "ti_id": self.ti_id, "options": self.options, - "default": self.default, + "defaults": self.defaults, "params": self.params, "multiple": self.multiple, "timeout_datetime": self.timeout_datetime, @@ -97,22 +97,22 @@ async def run(self) -> AsyncIterator[TriggerEvent]: while True: if self.timeout_datetime and self.timeout_datetime < timezone.utcnow(): # This normally should be checked in the HITLOperator - if self.default is None: + if self.defaults is None: yield TriggerEvent( HITLTriggerEventFailurePayload( - error='default" is required when "execution_timeout" is provided.' + error='defaults" is required when "execution_timeout" is provided.' 
) ) return await sync_to_async(update_htil_detail_response)( ti_id=self.ti_id, - chosen_options=self.default, + chosen_options=self.defaults, params_input=self.params, ) yield TriggerEvent( HITLTriggerEventSuccessPayload( - chosen_options=self.default, + chosen_options=self.defaults, params_input=self.params, ) ) diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index d8c31b39d2b79..9400fc0b6308c 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -47,7 +47,7 @@ def test_validate_defaults(self) -> None: subject="This is subject", options=["1", "2", "3", "4", "5"], body="This is body", - default=["1"], + defaults=["1"], multiple=False, params=ParamsDict({"input_1": 1}), ) @@ -56,11 +56,15 @@ def test_validate_defaults(self) -> None: @pytest.mark.parametrize( "extra_kwargs", [ - {"default": None, "execution_timeout": 10}, - {"default": ["0"]}, - {"multiple": False, "default": ["1", "2"]}, + {"defaults": None, "execution_timeout": 10}, + {"defaults": ["0"]}, + {"multiple": False, "defaults": ["1", "2"]}, + ], + ids=[ + "timeout with no defaults", + "defaults not in option", + "multiple defaults when multiple is False", ], - ids=["timeout with no default", "default not in option", "multiple default when multiple is False"], ) def test_validate_defaults_with_invalid_defaults(self, extra_kwargs) -> None: with pytest.raises(ValueError): @@ -80,7 +84,7 @@ def test_execute(self, dag_maker, session) -> None: subject="This is subject", options=["1", "2", "3", "4", "5"], body="This is body", - default=["1"], + defaults=["1"], multiple=False, params=ParamsDict({"input_1": 1}), ) @@ -92,7 +96,7 @@ def test_execute(self, dag_maker, session) -> None: assert hitl_detail_model.subject == "This is subject" assert hitl_detail_model.options == ["1", "2", "3", "4", "5"] assert hitl_detail_model.body == 
"This is body" - assert hitl_detail_model.default == ["1"] + assert hitl_detail_model.defaults == ["1"] assert hitl_detail_model.multiple is False assert hitl_detail_model.params == {"input_1": 1} assert hitl_detail_model.response_at is None @@ -106,7 +110,7 @@ def test_execute(self, dag_maker, session) -> None: assert registered_trigger.kwargs == { "ti_id": ti.id, "options": ["1", "2", "3", "4", "5"], - "default": ["1"], + "defaults": ["1"], "params": {"input_1": 1}, "multiple": False, "timeout_datetime": None, @@ -219,4 +223,4 @@ def test_init(self): ) assert op.options == ["OK"] - assert op.default == ["OK"] + assert op.defaults == ["OK"] diff --git a/providers/standard/tests/unit/standard/triggers/test_hitl.py b/providers/standard/tests/unit/standard/triggers/test_hitl.py index 29a4cc14e0fec..abfb641754c9e 100644 --- a/providers/standard/tests/unit/standard/triggers/test_hitl.py +++ b/providers/standard/tests/unit/standard/triggers/test_hitl.py @@ -44,7 +44,7 @@ def test_serialization(self): ti_id=TI_ID, options=["1", "2", "3", "4", "5"], params={"input": 1}, - default=["1"], + defaults=["1"], multiple=False, timeout_datetime=None, poke_interval=50.0, @@ -55,7 +55,7 @@ def test_serialization(self): "ti_id": TI_ID, "options": ["1", "2", "3", "4", "5"], "params": {"input": 1}, - "default": ["1"], + "defaults": ["1"], "multiple": False, "timeout_datetime": None, "poke_interval": 50.0, @@ -69,7 +69,7 @@ async def test_run_fallback_to_default_due_to_timeout(self, mock_update, mock_su ti_id=TI_ID, options=["1", "2", "3", "4", "5"], params={"input": 1}, - default=["1"], + defaults=["1"], multiple=False, timeout_datetime=utcnow() + timedelta(seconds=0.1), poke_interval=5, @@ -101,7 +101,7 @@ async def test_run(self, mock_update, mock_supervisor_comms): ti_id=TI_ID, options=["1", "2", "3", "4", "5"], params={"input": 1}, - default=["1"], + defaults=["1"], multiple=False, timeout_datetime=None, poke_interval=5, diff --git a/task-sdk/src/airflow/sdk/api/client.py 
b/task-sdk/src/airflow/sdk/api/client.py index b5663ca04854c..c8e45c10b3f66 100644 --- a/task-sdk/src/airflow/sdk/api/client.py +++ b/task-sdk/src/airflow/sdk/api/client.py @@ -641,7 +641,7 @@ def add_response( options: list[str], subject: str, body: str | None = None, - default: list[str] | None = None, + defaults: list[str] | None = None, multiple: bool = False, params: dict[str, Any] | None = None, ) -> HITLDetailRequestResult: @@ -651,7 +651,7 @@ def add_response( options=options, subject=subject, body=body, - default=default, + defaults=defaults, multiple=multiple, params=params, ) diff --git a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py index 15641090bbd2e..13e41bd5b6a0a 100644 --- a/task-sdk/src/airflow/sdk/api/datamodels/_generated.py +++ b/task-sdk/src/airflow/sdk/api/datamodels/_generated.py @@ -111,7 +111,7 @@ class CreateHITLDetailPayload(BaseModel): options: Annotated[list[str], Field(title="Options")] subject: Annotated[str, Field(title="Subject")] body: Annotated[str | None, Field(title="Body")] = None - default: Annotated[list[str] | None, Field(title="Default")] = None + defaults: Annotated[list[str] | None, Field(title="Defaults")] = None multiple: Annotated[bool | None, Field(title="Multiple")] = False params: Annotated[dict[str, Any] | None, Field(title="Params")] = None type: Annotated[Literal["CreateHITLDetailPayload"] | None, Field(title="Type")] = ( @@ -180,7 +180,7 @@ class HITLDetailRequest(BaseModel): options: Annotated[list[str], Field(title="Options")] subject: Annotated[str, Field(title="Subject")] body: Annotated[str | None, Field(title="Body")] = None - default: Annotated[list[str] | None, Field(title="Default")] = None + defaults: Annotated[list[str] | None, Field(title="Defaults")] = None multiple: Annotated[bool | None, Field(title="Multiple")] = False params: Annotated[dict[str, Any] | None, Field(title="Params")] = None @@ -193,7 +193,7 @@ class 
HITLDetailResponse(BaseModel): response_received: Annotated[bool, Field(title="Response Received")] user_id: Annotated[str | None, Field(title="User Id")] = None response_at: Annotated[AwareDatetime | None, Field(title="Response At")] = None - chosen_options: Annotated[list[str] | None, Field(title="Response Content")] = None + chosen_options: Annotated[list[str] | None, Field(title="Chosen Options")] = None params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None @@ -374,7 +374,7 @@ class UpdateHITLDetail(BaseModel): """ ti_id: Annotated[UUID, Field(title="Ti Id")] - chosen_options: Annotated[list[str], Field(title="Response Content")] + chosen_options: Annotated[list[str], Field(title="Chosen Options")] params_input: Annotated[dict[str, Any] | None, Field(title="Params Input")] = None type: Annotated[Literal["UpdateHITLDetail"] | None, Field(title="Type")] = "UpdateHITLDetail" diff --git a/task-sdk/src/airflow/sdk/execution_time/hitl.py b/task-sdk/src/airflow/sdk/execution_time/hitl.py index 235bc92336308..6f6409b3dcadb 100644 --- a/task-sdk/src/airflow/sdk/execution_time/hitl.py +++ b/task-sdk/src/airflow/sdk/execution_time/hitl.py @@ -35,7 +35,7 @@ def add_hitl_detail( options: list[str], subject: str, body: str | None = None, - default: list[str] | None = None, + defaults: list[str] | None = None, multiple: bool = False, params: dict[str, Any] | None = None, ) -> None: @@ -47,7 +47,7 @@ def add_hitl_detail( options=options, subject=subject, body=body, - default=default, + defaults=defaults, params=params, multiple=multiple, ) diff --git a/task-sdk/src/airflow/sdk/execution_time/supervisor.py b/task-sdk/src/airflow/sdk/execution_time/supervisor.py index 9bb8394554529..f5eede6274df4 100644 --- a/task-sdk/src/airflow/sdk/execution_time/supervisor.py +++ b/task-sdk/src/airflow/sdk/execution_time/supervisor.py @@ -1237,7 +1237,7 @@ def _handle_request(self, msg: ToSupervisor, log: FilteringBoundLogger, req_id: options=msg.options, 
subject=msg.subject, body=msg.body, - default=msg.default, + defaults=msg.defaults, params=msg.params, multiple=msg.multiple, ) diff --git a/task-sdk/tests/task_sdk/api/test_client.py b/task-sdk/tests/task_sdk/api/test_client.py index 93bcfb6f548d3..4f4af1e137242 100644 --- a/task-sdk/tests/task_sdk/api/test_client.py +++ b/task-sdk/tests/task_sdk/api/test_client.py @@ -1173,7 +1173,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: "options": ["Approval", "Reject"], "subject": "This is subject", "body": "This is body", - "default": ["Approval"], + "defaults": ["Approval"], "params": None, "multiple": False, }, @@ -1186,7 +1186,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: options=["Approval", "Reject"], subject="This is subject", body="This is body", - default=["Approval"], + defaults=["Approval"], params=None, multiple=False, ) @@ -1195,7 +1195,7 @@ def handle_request(request: httpx.Request) -> httpx.Response: assert result.options == ["Approval", "Reject"] assert result.subject == "This is subject" assert result.body == "This is body" - assert result.default == ["Approval"] + assert result.defaults == ["Approval"] assert result.params is None assert result.multiple is False diff --git a/task-sdk/tests/task_sdk/execution_time/test_supervisor.py b/task-sdk/tests/task_sdk/execution_time/test_supervisor.py index 7983617e31158..db71ab76f8207 100644 --- a/task-sdk/tests/task_sdk/execution_time/test_supervisor.py +++ b/task-sdk/tests/task_sdk/execution_time/test_supervisor.py @@ -1778,7 +1778,7 @@ def watched_subprocess(self, mocker): options=["Approve", "Reject"], subject="This is subject", body="This is body", - default=["Approve"], + defaults=["Approve"], multiple=False, params={}, ), @@ -1787,7 +1787,7 @@ def watched_subprocess(self, mocker): "options": ["Approve", "Reject"], "subject": "This is subject", "body": "This is body", - "default": ["Approve"], + "defaults": ["Approve"], "multiple": False, "params": {}, "type": 
"HITLDetailRequestResult", @@ -1796,7 +1796,7 @@ def watched_subprocess(self, mocker): (), { "body": "This is body", - "default": ["Approve"], + "defaults": ["Approve"], "multiple": False, "options": ["Approve", "Reject"], "params": {}, @@ -1808,7 +1808,7 @@ def watched_subprocess(self, mocker): options=["Approve", "Reject"], subject="This is subject", body="This is body", - default=["Approve"], + defaults=["Approve"], multiple=False, params={}, ), From 37ff01cc5702def7e1a78f896f39472bcb182de0 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 19:50:26 +0800 Subject: [PATCH 26/30] feat(hitl): rewrite public api to fit frontend's need --- .../openapi/v2-rest-api-generated.yaml | 184 ++++++++++++++++- .../core_api/routes/public/hitl.py | 191 +++++++++++++++--- .../airflow/ui/openapi-gen/queries/common.ts | 18 +- .../ui/openapi-gen/queries/ensureQueryData.ts | 29 ++- .../ui/openapi-gen/queries/prefetch.ts | 29 ++- .../airflow/ui/openapi-gen/queries/queries.ts | 68 ++++++- .../ui/openapi-gen/queries/suspense.ts | 29 ++- .../ui/openapi-gen/requests/schemas.gen.ts | 20 +- .../ui/openapi-gen/requests/services.gen.ts | 86 +++++++- .../ui/openapi-gen/requests/types.gen.ts | 93 ++++++++- .../core_api/routes/public/test_hitl.py | 60 ++++-- .../task_sdk/execution_time/test_hitl.py | 4 +- 12 files changed, 709 insertions(+), 102 deletions(-) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index 8b2969a91bf7f..0a5e659d2bb58 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -7122,7 +7122,7 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /api/v2/hitl-details/{task_instance_id}: + /api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}: patch: tags: - HumanInTheLoop @@ -7132,13 
+7132,24 @@ paths: security: - OAuth2PasswordBearer: [] parameters: - - name: task_instance_id + - name: dag_id in: path required: true schema: type: string - format: uuid - title: Task Instance Id + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id requestBody: required: true content: @@ -7191,13 +7202,172 @@ paths: security: - OAuth2PasswordBearer: [] parameters: - - name: task_instance_id + - name: dag_id in: path required: true schema: type: string - format: uuid - title: Task Instance Id + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}: + patch: + tags: + - HumanInTheLoop + summary: Update Mapped Ti Hitl Detail + description: Update a Human-in-the-loop detail. 
+ operationId: update_mapped_ti_hitl_detail + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateHITLDetailPayload' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - HumanInTheLoop + summary: Get Mapped Ti Hitl Detail + description: Get a Human-in-the-loop detail of a specific task instance. 
+ operationId: get_mapped_ti_hitl_detail + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index responses: '200': description: Successful Response diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py index 6de052b73ee8b..78c7604b51677 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -16,8 +16,6 @@ # under the License. from __future__ import annotations -from uuid import UUID - import structlog from fastapi import Depends, HTTPException, status from sqlalchemy import select @@ -42,24 +40,53 @@ log = structlog.get_logger(__name__) -@hitl_router.patch( - "/{task_instance_id}", - responses=create_openapi_http_exception_doc( - [ +def _get_task_instance( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int | None = None, +) -> TI: + query = select(TI).where( + TI.dag_id == dag_id, + TI.run_id == dag_run_id, + TI.task_id == task_id, + ) + + if map_index is not None: + query = query.where(TI.map_index == map_index) + + task_instance = session.scalar(query) + if task_instance is None: + raise HTTPException( status.HTTP_404_NOT_FOUND, - status.HTTP_409_CONFLICT, - ] - ), - dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], -) -def update_hitl_detail( - task_instance_id: UUID, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + if map_index 
is None and task_instance.map_index != -1: + raise HTTPException( + status.HTTP_404_NOT_FOUND, "Task instance is mapped, add the map_index value to the URL" + ) + + return task_instance + + +def _update_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, update_hitl_detail_payload: UpdateHITLDetailPayload, user: GetUserDep, session: SessionDep, + map_index: int | None = None, ) -> HITLDetailResponse: - """Update a Human-in-the-loop detail.""" - ti_id_str = str(task_instance_id) + task_instance = _get_task_instance( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + ti_id_str = str(task_instance.id) hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) if not hitl_detail_model: raise HTTPException( @@ -83,18 +110,28 @@ def update_hitl_detail( return HITLDetailResponse.model_validate(hitl_detail_model) -@hitl_router.get( - "/{task_instance_id}", - status_code=status.HTTP_200_OK, - responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), - dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], -) -def get_hitl_detail( - task_instance_id: UUID, +def _get_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, session: SessionDep, + map_index: int | None = None, ) -> HITLDetail: """Get a Human-in-the-loop detail of a specific task instance.""" - ti_id_str = str(task_instance_id) + task_instance = _get_task_instance( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + if task_instance is None: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + + ti_id_str = str(task_instance.id) hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) if not 
hitl_detail_model: log.error("Human-in-the-loop detail not found") @@ -108,6 +145,112 @@ def get_hitl_detail( return HITLDetail.model_validate(hitl_detail_model) +@hitl_router.patch( + "/{dag_id}/{dag_run_id}/{task_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def update_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, +) -> HITLDetailResponse: + """Update a Human-in-the-loop detail.""" + return _update_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + update_hitl_detail_payload=update_hitl_detail_payload, + user=user, + map_index=None, + ) + + +@hitl_router.patch( + "/{dag_id}/{dag_run_id}/{task_id}/{map_index}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def update_mapped_ti_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, + map_index: int, +) -> HITLDetailResponse: + """Update a Human-in-the-loop detail.""" + return _update_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + update_hitl_detail_payload=update_hitl_detail_payload, + user=user, + map_index=map_index, + ) + + +@hitl_router.get( + "/{dag_id}/{dag_run_id}/{task_id}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def get_hitl_detail( + dag_id: str, + dag_run_id: 
str, + task_id: str, + session: SessionDep, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + return _get_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=None, + ) + + +@hitl_router.get( + "/{dag_id}/{dag_run_id}/{task_id}/{map_index}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def get_mapped_ti_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + return _get_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + + @hitl_router.get( "/", status_code=status.HTTP_200_OK, diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts index 8c33e0cbe20ac..91fd42bd07909 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts @@ -623,9 +623,20 @@ export const UseDagVersionServiceGetDagVersionsKeyFn = ({ bundleName, bundleVers export type HumanInTheLoopServiceGetHitlDetailDefaultResponse = Awaited>; export type HumanInTheLoopServiceGetHitlDetailQueryResult = UseQueryResult; export const useHumanInTheLoopServiceGetHitlDetailKey = "HumanInTheLoopServiceGetHitlDetail"; -export const UseHumanInTheLoopServiceGetHitlDetailKeyFn = ({ taskInstanceId }: { - taskInstanceId: string; -}, queryKey?: Array) => [useHumanInTheLoopServiceGetHitlDetailKey, ...(queryKey ?? 
[{ taskInstanceId }])]; +export const UseHumanInTheLoopServiceGetHitlDetailKeyFn = ({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: Array) => [useHumanInTheLoopServiceGetHitlDetailKey, ...(queryKey ?? [{ dagId, dagRunId, taskId }])]; +export type HumanInTheLoopServiceGetMappedTiHitlDetailDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetMappedTiHitlDetailQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetMappedTiHitlDetailKey = "HumanInTheLoopServiceGetMappedTiHitlDetail"; +export const UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: Array) => [useHumanInTheLoopServiceGetMappedTiHitlDetailKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; export type HumanInTheLoopServiceGetHitlDetailsDefaultResponse = Awaited>; export type HumanInTheLoopServiceGetHitlDetailsQueryResult = UseQueryResult; export const useHumanInTheLoopServiceGetHitlDetailsKey = "HumanInTheLoopServiceGetHitlDetails"; @@ -763,6 +774,7 @@ export type XcomServiceUpdateXcomEntryMutationResult = Awaited>; export type VariableServiceBulkVariablesMutationResult = Awaited>; export type HumanInTheLoopServiceUpdateHitlDetailMutationResult = Awaited>; +export type HumanInTheLoopServiceUpdateMappedTiHitlDetailMutationResult = Awaited>; export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited>; diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts index f6c34b4a1c3fe..d10b539687bf5 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts +++ 
b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts @@ -1175,13 +1175,34 @@ export const ensureUseDagVersionServiceGetDagVersionsData = (queryClient: QueryC * Get Hitl Detail * Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. -* @param data.taskInstanceId +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlDetailData = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex * @returns HITLDetail Successful Response * @throws ApiError */ -export const ensureUseHumanInTheLoopServiceGetHitlDetailData = (queryClient: QueryClient, { taskInstanceId }: { - taskInstanceId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) }); +export const ensureUseHumanInTheLoopServiceGetMappedTiHitlDetailData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) }); /** * Get Hitl Details * Get Human-in-the-loop details. 
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts index 5993440cc9bcc..2eab4c35b3ef4 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts @@ -1175,13 +1175,34 @@ export const prefetchUseDagVersionServiceGetDagVersions = (queryClient: QueryCli * Get Hitl Detail * Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. -* @param data.taskInstanceId +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlDetail = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex * @returns HITLDetail Successful Response * @throws ApiError */ -export const prefetchUseHumanInTheLoopServiceGetHitlDetail = (queryClient: QueryClient, { taskInstanceId }: { - taskInstanceId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) }); +export const prefetchUseHumanInTheLoopServiceGetMappedTiHitlDetail = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) }); /** * Get Hitl Details * Get Human-in-the-loop details. diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts index d23a6760a26e9..f47a175614451 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts @@ -1175,13 +1175,34 @@ export const useDagVersionServiceGetDagVersions = = unknown[]>({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) as TData, ...options }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex * @returns HITLDetail Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceGetHitlDetail = = unknown[]>({ taskInstanceId }: { - taskInstanceId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) as TData, ...options }); +export const useHumanInTheLoopServiceGetMappedTiHitlDetail = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); /** * Get Hitl Details * Get Human-in-the-loop details. @@ -2010,18 +2031,49 @@ export const useVariableServiceBulkVariables = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody, taskInstanceId }) => HumanInTheLoopService.updateHitlDetail({ requestBody, taskInstanceId }) as unknown as Promise, ...options }); + taskId: string; +}, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody, taskId }) => HumanInTheLoopService.updateHitlDetail({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise, ...options }); +/** +* Update Mapped Ti Hitl Detail +* Update a Human-in-the-loop detail. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @param data.requestBody +* @returns HITLDetailResponse Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceUpdateMappedTiHitlDetail = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId }) => HumanInTheLoopService.updateMappedTiHitlDetail({ dagId, dagRunId, mapIndex, requestBody, taskId }) as unknown as Promise, ...options }); /** * Delete Asset Queued Events * Delete queued asset events for an asset. diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts index 4564289a972e8..2f9e37e78d6c2 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts @@ -1175,13 +1175,34 @@ export const useDagVersionServiceGetDagVersionsSuspense = = unknown[]>({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) as TData, ...options }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex * @returns HITLDetail Successful Response * @throws ApiError */ -export const useHumanInTheLoopServiceGetHitlDetailSuspense = = unknown[]>({ taskInstanceId }: { - taskInstanceId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ taskInstanceId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ taskInstanceId }) as TData, ...options }); +export const useHumanInTheLoopServiceGetMappedTiHitlDetailSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); /** * Get Hitl Details * Get Human-in-the-loop details. 
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts index 6611f24e9b9dc..2e31d61df5ae1 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -3436,7 +3436,7 @@ export const $HITLDetail = { ], title: 'Body' }, - default: { + defaults: { anyOf: [ { items: { @@ -3448,7 +3448,7 @@ export const $HITLDetail = { type: 'null' } ], - title: 'Default' + title: 'Defaults' }, multiple: { type: 'boolean', @@ -3483,7 +3483,7 @@ export const $HITLDetail = { ], title: 'Response At' }, - response_content: { + chosen_options: { anyOf: [ { items: { @@ -3495,7 +3495,7 @@ export const $HITLDetail = { type: 'null' } ], - title: 'Response Content' + title: 'Chosen Options' }, params_input: { additionalProperties: true, @@ -3545,12 +3545,12 @@ export const $HITLDetailResponse = { format: 'date-time', title: 'Response At' }, - response_content: { + chosen_options: { items: { type: 'string' }, type: 'array', - title: 'Response Content' + title: 'Chosen Options' }, params_input: { additionalProperties: true, @@ -3559,7 +3559,7 @@ export const $HITLDetailResponse = { } }, type: 'object', - required: ['user_id', 'response_at', 'response_content'], + required: ['user_id', 'response_at', 'chosen_options'], title: 'HITLDetailResponse', description: 'Response of updating a Human-in-the-loop detail.' 
} as const; @@ -5854,12 +5854,12 @@ export const $TriggererInfoResponse = { export const $UpdateHITLDetailPayload = { properties: { - response_content: { + chosen_options: { items: { type: 'string' }, type: 'array', - title: 'Response Content' + title: 'Chosen Options' }, params_input: { additionalProperties: true, @@ -5868,7 +5868,7 @@ export const $UpdateHITLDetailPayload = { } }, type: 'object', - required: ['response_content'], + required: ['chosen_options'], title: 'UpdateHITLDetailPayload', description: 'Schema for updating the content of a Human-in-the-loop detail.' } as const; diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts index 3a9443b6def48..b935e2042366d 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts @@ -3,7 +3,7 @@ import type { CancelablePromise } from './core/CancelablePromise'; import { OpenAPI } from './core/OpenAPI'; import { request as __request } from './core/request'; -import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, 
UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, 
GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlDetailData, UpdateHitlDetailResponse, 
GetHitlDetailData, GetHitlDetailResponse, GetHitlDetailsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; +import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, 
CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, 
GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlDetailData, UpdateHitlDetailResponse, GetHitlDetailData, GetHitlDetailResponse, UpdateMappedTiHitlDetailData, UpdateMappedTiHitlDetailResponse, GetMappedTiHitlDetailData, GetMappedTiHitlDetailResponse, GetHitlDetailsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, 
GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; export class AssetService { /** @@ -3365,7 +3365,9 @@ export class HumanInTheLoopService { * Update Hitl Detail * Update a Human-in-the-loop detail. * @param data The data for the request. - * @param data.taskInstanceId + * @param data.dagId + * @param data.dagRunId + * @param data.taskId * @param data.requestBody * @returns HITLDetailResponse Successful Response * @throws ApiError @@ -3373,9 +3375,11 @@ export class HumanInTheLoopService { public static updateHitlDetail(data: UpdateHitlDetailData): CancelablePromise { return __request(OpenAPI, { method: 'PATCH', - url: '/api/v2/hitl-details/{task_instance_id}', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}', path: { - task_instance_id: data.taskInstanceId + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId }, body: data.requestBody, mediaType: 'application/json', @@ -3393,16 +3397,84 @@ export class HumanInTheLoopService { * Get Hitl Detail * Get a Human-in-the-loop detail of a specific task instance. * @param data The data for the request. - * @param data.taskInstanceId + * @param data.dagId + * @param data.dagRunId + * @param data.taskId * @returns HITLDetail Successful Response * @throws ApiError */ public static getHitlDetail(data: GetHitlDetailData): CancelablePromise { return __request(OpenAPI, { method: 'GET', - url: '/api/v2/hitl-details/{task_instance_id}', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Update Mapped Ti Hitl Detail + * Update a Human-in-the-loop detail. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @returns HITLDetailResponse Successful Response + * @throws ApiError + */ + public static updateMappedTiHitlDetail(data: UpdateMappedTiHitlDetailData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Ti Hitl Detail + * Get a Human-in-the-loop detail of a specific task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns HITLDetail Successful Response + * @throws ApiError + */ + public static getMappedTiHitlDetail(data: GetMappedTiHitlDetailData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}', path: { - task_instance_id: data.taskInstanceId + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex }, errors: { 401: 'Unauthorized', diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index e45ab6ea185f8..591ce7884373c 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -925,14 +925,14 @@ export type HITLDetail = { options: Array<(string)>; subject: string; body?: string | null; - default?: Array<(string)> | null; + defaults?: Array<(string)> | null; multiple?: boolean; params?: { [key: string]: unknown; 
}; user_id?: string | null; response_at?: string | null; - response_content?: Array<(string)> | null; + chosen_options?: Array<(string)> | null; params_input?: { [key: string]: unknown; }; @@ -953,7 +953,7 @@ export type HITLDetailCollection = { export type HITLDetailResponse = { user_id: string; response_at: string; - response_content: Array<(string)>; + chosen_options: Array<(string)>; params_input?: { [key: string]: unknown; }; @@ -1475,7 +1475,7 @@ export type TriggererInfoResponse = { * Schema for updating the content of a Human-in-the-loop detail. */ export type UpdateHITLDetailPayload = { - response_content: Array<(string)>; + chosen_options: Array<(string)>; params_input?: { [key: string]: unknown; }; @@ -2900,18 +2900,41 @@ export type GetDagVersionsData = { export type GetDagVersionsResponse = DAGVersionCollectionResponse; export type UpdateHitlDetailData = { + dagId: string; + dagRunId: string; requestBody: UpdateHITLDetailPayload; - taskInstanceId: string; + taskId: string; }; export type UpdateHitlDetailResponse = HITLDetailResponse; export type GetHitlDetailData = { - taskInstanceId: string; + dagId: string; + dagRunId: string; + taskId: string; }; export type GetHitlDetailResponse = HITLDetail; +export type UpdateMappedTiHitlDetailData = { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: UpdateHITLDetailPayload; + taskId: string; +}; + +export type UpdateMappedTiHitlDetailResponse = HITLDetailResponse; + +export type GetMappedTiHitlDetailData = { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}; + +export type GetMappedTiHitlDetailResponse = HITLDetail; + export type GetHitlDetailsResponse = HITLDetailCollection; export type GetHealthResponse = HealthInfoResponse; @@ -5860,7 +5883,7 @@ export type $OpenApiTs = { }; }; }; - '/api/v2/hitl-details/{task_instance_id}': { + '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}': { patch: { req: UpdateHitlDetailData; res: { @@ -5916,6 +5939,62 @@ export 
type $OpenApiTs = { }; }; }; + '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}': { + patch: { + req: UpdateMappedTiHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetailResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetMappedTiHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; '/api/v2/hitl-details/': { get: { res: { diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py index 7bbe7153e8257..f2d9d887bdfe8 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -27,7 +27,6 @@ from typing import TYPE_CHECKING, Any import time_machine -from uuid6 import uuid7 from airflow.models.hitl import HITLDetail @@ -36,7 +35,8 @@ pytestmark = pytest.mark.db_test -TI_ID = uuid7() + +DAG_ID = "test_hitl_dag" @pytest.fixture @@ -82,9 +82,9 @@ def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: class TestUpdateHITLDetailEndpoint: @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) @pytest.mark.usefixtures("sample_hitl_detail") - def test_should_respond_200_with_existing_response(self, test_client, sample_ti): + def test_should_respond_200_with_existing_response(self, test_client, sample_ti, session): response = test_client.patch( - f"/hitl-details/{sample_ti.id}", + 
f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}", json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) @@ -97,20 +97,23 @@ def test_should_respond_200_with_existing_response(self, test_client, sample_ti) } def test_should_respond_404(self, test_client, sample_ti): - response = test_client.get(f"/hitl-details/{sample_ti.id}") + response = test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 404 assert response.json() == { - "detail": { - "message": "Human-in-the-loop detail not found", - "reason": "not_found", - }, + "detail": ( + f"The Task Instance with dag_id: `{sample_ti.dag_id}`," + f" run_id: `{sample_ti.run_id}`, task_id: `{sample_ti.task.task_id}`" + " and map_index: `None` was not found" + ), } @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) @pytest.mark.usefixtures("sample_hitl_detail") - def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_detail_dict): + def test_should_respond_409(self, test_client, sample_ti): response = test_client.patch( - f"/hitl-details/{sample_ti.id}", + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}", json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) @@ -124,7 +127,7 @@ def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_d assert response.json() == expected_response response = test_client.patch( - f"/hitl-details/{sample_ti.id}", + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}", json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) assert response.status_code == 409 @@ -137,11 +140,15 @@ def test_should_respond_409(self, test_client, sample_ti, expected_sample_hitl_d } def test_should_respond_401(self, unauthenticated_test_client, sample_ti): - response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti.id}") + response 
= unauthenticated_test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 401 def test_should_respond_403(self, unauthorized_test_client, sample_ti): - response = unauthorized_test_client.get(f"/hitl-details/{sample_ti.id}") + response = unauthorized_test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 403 @@ -150,26 +157,35 @@ class TestGetHITLDetailEndpoint: def test_should_respond_200_with_existing_response( self, test_client, sample_ti, expected_sample_hitl_detail_dict ): - response = test_client.get(f"/hitl-details/{sample_ti.id}") + response = test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 200 assert response.json() == expected_sample_hitl_detail_dict def test_should_respond_404(self, test_client, sample_ti): - response = test_client.get(f"/hitl-details/{sample_ti.id}") + response = test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 404 assert response.json() == { - "detail": { - "message": "Human-in-the-loop detail not found", - "reason": "not_found", - }, + "detail": ( + f"The Task Instance with dag_id: `{sample_ti.dag_id}`," + f" run_id: `{sample_ti.run_id}`, task_id: `{sample_ti.task.task_id}`" + " and map_index: `None` was not found" + ), } def test_should_respond_401(self, unauthenticated_test_client, sample_ti): - response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti.id}") + response = unauthenticated_test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 401 def test_should_respond_403(self, unauthorized_test_client, sample_ti): - response = unauthorized_test_client.get(f"/hitl-details/{sample_ti.id}") + response = 
unauthorized_test_client.get( + f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + ) assert response.status_code == 403 diff --git a/task-sdk/tests/task_sdk/execution_time/test_hitl.py b/task-sdk/tests/task_sdk/execution_time/test_hitl.py index 7f717dfa85c40..cab17e30bacec 100644 --- a/task-sdk/tests/task_sdk/execution_time/test_hitl.py +++ b/task-sdk/tests/task_sdk/execution_time/test_hitl.py @@ -37,7 +37,7 @@ def test_add_hitl_detail(mock_supervisor_comms) -> None: options=["Approve", "Reject"], subject="Subject", body="Optional body", - default=["Approve", "Reject"], + defaults=["Approve", "Reject"], params={"input_1": 1}, multiple=False, ) @@ -47,7 +47,7 @@ def test_add_hitl_detail(mock_supervisor_comms) -> None: options=["Approve", "Reject"], subject="Subject", body="Optional body", - default=["Approve", "Reject"], + defaults=["Approve", "Reject"], params={"input_1": 1}, multiple=False, ) From 22b23c58a31f36702b1c24135b69e0059e13c272 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 22:50:40 +0800 Subject: [PATCH 27/30] style(hitl): improve test typing --- .../core_api/routes/public/test_hitl.py | 153 +++++++++++------- 1 file changed, 96 insertions(+), 57 deletions(-) diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py index f2d9d887bdfe8..a8e13e7df1ffa 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -17,6 +17,7 @@ from __future__ import annotations import pytest +from sqlalchemy.orm import Session from tests_common.test_utils.db import AIRFLOW_V_3_1_PLUS @@ -31,8 +32,12 @@ from airflow.models.hitl import HITLDetail if TYPE_CHECKING: + from fastapi.testclient import TestClient + from airflow.models.taskinstance import TaskInstance + from tests_common.pytest_plugin import CreateTaskInstance + 
pytestmark = pytest.mark.db_test @@ -40,12 +45,20 @@ @pytest.fixture -def sample_ti(create_task_instance) -> TaskInstance: +def sample_ti(create_task_instance: CreateTaskInstance) -> TaskInstance: return create_task_instance() @pytest.fixture -def sample_hitl_detail(session, sample_ti) -> HITLDetail: +def sample_ti_url_identifier(sample_ti: TaskInstance) -> str: + if TYPE_CHECKING: + assert sample_ti.task + + return f"{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" + + +@pytest.fixture +def sample_hitl_detail(sample_ti: TaskInstance, session: Session) -> HITLDetail: hitl_detail_model = HITLDetail( ti_id=sample_ti.id, options=["Approve", "Reject"], @@ -62,7 +75,19 @@ def sample_hitl_detail(session, sample_ti) -> HITLDetail: @pytest.fixture -def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: +def expected_ti_not_found_error_msg(sample_ti: TaskInstance) -> str: + if TYPE_CHECKING: + assert sample_ti.task + + return ( + f"The Task Instance with dag_id: `{sample_ti.dag_id}`," + f" run_id: `{sample_ti.run_id}`, task_id: `{sample_ti.task.task_id}`" + " and map_index: `None` was not found" + ) + + +@pytest.fixture +def expected_sample_hitl_detail_dict(sample_ti: TaskInstance) -> dict[str, Any]: return { "body": "this is body", "defaults": ["Approve"], @@ -82,9 +107,13 @@ def expected_sample_hitl_detail_dict(sample_ti) -> dict[str, Any]: class TestUpdateHITLDetailEndpoint: @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) @pytest.mark.usefixtures("sample_hitl_detail") - def test_should_respond_200_with_existing_response(self, test_client, sample_ti, session): + def test_should_respond_200_with_existing_response( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: response = test_client.patch( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}", + f"/hitl-details/{sample_ti_url_identifier}", json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) @@ -96,24 
+125,26 @@ def test_should_respond_200_with_existing_response(self, test_client, sample_ti, "response_at": "2025-07-03T00:00:00Z", } - def test_should_respond_404(self, test_client, sample_ti): - response = test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + def test_should_respond_404( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + expected_ti_not_found_error_msg: str, + ) -> None: + response = test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 404 - assert response.json() == { - "detail": ( - f"The Task Instance with dag_id: `{sample_ti.dag_id}`," - f" run_id: `{sample_ti.run_id}`, task_id: `{sample_ti.task.task_id}`" - " and map_index: `None` was not found" - ), - } + assert response.json() == {"detail": expected_ti_not_found_error_msg} @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) @pytest.mark.usefixtures("sample_hitl_detail") - def test_should_respond_409(self, test_client, sample_ti): + def test_should_respond_409( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + sample_ti: TaskInstance, + ) -> None: response = test_client.patch( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}", + f"/hitl-details/{sample_ti_url_identifier}", json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) @@ -127,7 +158,7 @@ def test_should_respond_409(self, test_client, sample_ti): assert response.json() == expected_response response = test_client.patch( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}", + f"/hitl-details/{sample_ti_url_identifier}", json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, ) assert response.status_code == 409 @@ -139,61 +170,69 @@ def test_should_respond_409(self, test_client, sample_ti): ) } - def test_should_respond_401(self, unauthenticated_test_client, sample_ti): - response = 
unauthenticated_test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + def test_should_respond_401( + self, + unauthenticated_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 401 - def test_should_respond_403(self, unauthorized_test_client, sample_ti): - response = unauthorized_test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + def test_should_respond_403( + self, + unauthorized_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthorized_test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 403 class TestGetHITLDetailEndpoint: @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response( - self, test_client, sample_ti, expected_sample_hitl_detail_dict - ): - response = test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + self, + test_client: TestClient, + sample_ti_url_identifier: str, + expected_sample_hitl_detail_dict: dict[str, Any], + ) -> None: + response = test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 200 assert response.json() == expected_sample_hitl_detail_dict - def test_should_respond_404(self, test_client, sample_ti): - response = test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + def test_should_respond_404( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + expected_ti_not_found_error_msg: str, + ) -> None: + response = test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 404 - assert response.json() == { - "detail": ( - f"The Task Instance with dag_id: `{sample_ti.dag_id}`," - f" run_id: 
`{sample_ti.run_id}`, task_id: `{sample_ti.task.task_id}`" - " and map_index: `None` was not found" - ), - } - - def test_should_respond_401(self, unauthenticated_test_client, sample_ti): - response = unauthenticated_test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + assert response.json() == {"detail": expected_ti_not_found_error_msg} + + def test_should_respond_401( + self, + unauthenticated_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 401 - def test_should_respond_403(self, unauthorized_test_client, sample_ti): - response = unauthorized_test_client.get( - f"/hitl-details/{sample_ti.dag_id}/{sample_ti.run_id}/{sample_ti.task.task_id}" - ) + def test_should_respond_403( + self, + unauthorized_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthorized_test_client.get(f"/hitl-details/{sample_ti_url_identifier}") assert response.status_code == 403 class TestGetHITLDetailsEndpoint: @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response( - self, test_client, sample_ti, expected_sample_hitl_detail_dict - ): + self, + test_client: TestClient, + expected_sample_hitl_detail_dict: dict[str, Any], + ) -> None: response = test_client.get("/hitl-details/") assert response.status_code == 200 assert response.json() == { @@ -201,7 +240,7 @@ def test_should_respond_200_with_existing_response( "total_entries": 1, } - def test_should_respond_200_without_response(self, test_client): + def test_should_respond_200_without_response(self, test_client: TestClient) -> None: response = test_client.get("/hitl-details/") assert response.status_code == 200 assert response.json() == { @@ -209,10 +248,10 @@ def test_should_respond_200_without_response(self, test_client): "total_entries": 0, } - def 
test_should_respond_401(self, unauthenticated_test_client): + def test_should_respond_401(self, unauthenticated_test_client: TestClient) -> None: response = unauthenticated_test_client.get("/hitl-details/") assert response.status_code == 401 - def test_should_respond_403(self, unauthorized_test_client): + def test_should_respond_403(self, unauthorized_test_client: TestClient) -> None: response = unauthorized_test_client.get("/hitl-details/") assert response.status_code == 403 From c5fe122cb8c6d30766fc29fb6b3daec19dffe196 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Jul 2025 22:56:14 +0800 Subject: [PATCH 28/30] test(hitl): add mapped test cases --- .../openapi/v2-rest-api-generated.yaml | 7 - .../core_api/routes/public/test_hitl.py | 134 ++++++++++++++++++ 2 files changed, 134 insertions(+), 7 deletions(-) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index 0a5e659d2bb58..ae9a645fc2e0f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -7220,13 +7220,6 @@ paths: schema: type: string title: Task Id - - name: map_index - in: query - required: false - schema: - type: integer - default: -1 - title: Map Index responses: '200': description: Successful Response diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py index a8e13e7df1ffa..3fa34a5779a37 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py @@ -86,6 +86,18 @@ def expected_ti_not_found_error_msg(sample_ti: TaskInstance) -> str: ) +@pytest.fixture +def expected_mapped_ti_not_found_error_msg(sample_ti: TaskInstance) -> str: + if 
TYPE_CHECKING: + assert sample_ti.task + + return ( + f"The Task Instance with dag_id: `{sample_ti.dag_id}`," + f" run_id: `{sample_ti.run_id}`, task_id: `{sample_ti.task.task_id}`" + " and map_index: `-1` was not found" + ) + + @pytest.fixture def expected_sample_hitl_detail_dict(sample_ti: TaskInstance) -> dict[str, Any]: return { @@ -187,6 +199,89 @@ def test_should_respond_403( assert response.status_code == 403 +class TestUpdateMappedTIHITLDetail: + @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) + @pytest.mark.usefixtures("sample_hitl_detail") + def test_should_respond_200_with_existing_response( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = test_client.patch( + f"/hitl-details/{sample_ti_url_identifier}/-1", + json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, + ) + + assert response.status_code == 200 + assert response.json() == { + "params_input": {"input_1": 2}, + "chosen_options": ["Approve"], + "user_id": "test", + "response_at": "2025-07-03T00:00:00Z", + } + + def test_should_respond_404( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + expected_mapped_ti_not_found_error_msg: str, + ) -> None: + response = test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 404 + assert response.json() == {"detail": expected_mapped_ti_not_found_error_msg} + + @time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False) + @pytest.mark.usefixtures("sample_hitl_detail") + def test_should_respond_409( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + sample_ti: TaskInstance, + ) -> None: + response = test_client.patch( + f"/hitl-details/{sample_ti_url_identifier}/-1", + json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, + ) + + expected_response = { + "params_input": {"input_1": 2}, + "chosen_options": ["Approve"], + "user_id": "test", + "response_at": "2025-07-03T00:00:00Z", + } + 
assert response.status_code == 200 + assert response.json() == expected_response + + response = test_client.patch( + f"/hitl-details/{sample_ti_url_identifier}/-1", + json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}}, + ) + assert response.status_code == 409 + assert response.json() == { + "detail": ( + "Human-in-the-loop detail has already been updated for Task Instance " + f"with id {sample_ti.id} " + "and is not allowed to write again." + ) + } + + def test_should_respond_401( + self, + unauthenticated_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 401 + + def test_should_respond_403( + self, + unauthorized_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthorized_test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 403 + + class TestGetHITLDetailEndpoint: @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response( @@ -226,6 +321,45 @@ def test_should_respond_403( assert response.status_code == 403 +class TestGetMappedTIHITLDetail: + @pytest.mark.usefixtures("sample_hitl_detail") + def test_should_respond_200_with_existing_response( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + expected_sample_hitl_detail_dict: dict[str, Any], + ) -> None: + response = test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 200 + assert response.json() == expected_sample_hitl_detail_dict + + def test_should_respond_404( + self, + test_client: TestClient, + sample_ti_url_identifier: str, + expected_mapped_ti_not_found_error_msg: str, + ) -> None: + response = test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 404 + assert response.json() == {"detail": 
expected_mapped_ti_not_found_error_msg} + + def test_should_respond_401( + self, + unauthenticated_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthenticated_test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 401 + + def test_should_respond_403( + self, + unauthorized_test_client: TestClient, + sample_ti_url_identifier: str, + ) -> None: + response = unauthorized_test_client.get(f"/hitl-details/{sample_ti_url_identifier}/-1") + assert response.status_code == 403 + + class TestGetHITLDetailsEndpoint: @pytest.mark.usefixtures("sample_hitl_detail") def test_should_respond_200_with_existing_response( From 570350d743f32629f51184614881d1768affa98d Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Thu, 10 Jul 2025 20:03:27 +0800 Subject: [PATCH 29/30] feat(hitl): merge termination operator into approval operators --- .../providers/standard/operators/hitl.py | 59 ++++++++++++------ .../unit/standard/operators/test_hitl.py | 60 +++++++++++++++---- 2 files changed, 91 insertions(+), 28 deletions(-) diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 641e56542b924..175687a554ab5 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -16,6 +16,8 @@ # under the License. 
from __future__ import annotations +import logging + from airflow.exceptions import AirflowOptionalProviderFeatureException from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS @@ -27,10 +29,10 @@ from datetime import datetime, timezone from typing import TYPE_CHECKING, Any -from airflow.models import SkipMixin from airflow.models.baseoperator import BaseOperator from airflow.providers.standard.exceptions import HITLTriggerEventError from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload +from airflow.providers.standard.utils.skipmixin import SkipMixin from airflow.sdk.definitions.param import ParamsDict from airflow.sdk.execution_time.hitl import add_hitl_detail @@ -159,29 +161,52 @@ def validate_params_input(self, params_input: Mapping) -> None: raise ValueError(f"params_input {params_input} does not match params {self.params}") -class ApprovalOperator(HITLOperator): +class ApprovalOperator(HITLOperator, SkipMixin): """Human-in-the-loop Operator that has only 'Approval' and 'Reject' options.""" - def __init__(self, **kwargs) -> None: - if "options" in kwargs: - raise ValueError("Passing options to ApprovalOperator is not allowed.") - super().__init__(options=["Approve", "Reject"], **kwargs) + inherits_from_skipmixin = True + FIXED_ARGS = ["options", "multiple"] -class HITLTerminationOperator(HITLOperator, SkipMixin): - """ - Human-in-the-loop Operator that has only 'Stop' and 'Proceed' options. + def __init__(self, ignore_downstream_trigger_rules: bool = False, **kwargs) -> None: + for arg in self.FIXED_ARGS: + if arg in kwargs: + raise ValueError(f"Passing {arg} to ApprovalOperator is not allowed.") - When 'Stop' is selected by user, the dag run terminates like ShortCirquitOperator. 
- """ + self.ignore_downstream_trigger_rules = ignore_downstream_trigger_rules - def __init__(self, **kwargs) -> None: - if "options" in kwargs: - raise ValueError("Passing options to HITLTerminationOperator is not allowed.") - super().__init__(options=["Stop", "Proceed"], **kwargs) + super().__init__(options=["Approve", "Reject"], multiple=False, **kwargs) - def execute_complete(self, context: Context, event: dict[str, Any]) -> None: - raise NotImplementedError + def execute_complete(self, context: Context, event: dict[str, Any]) -> Any: + ret = super().execute_complete(context=context, event=event) + + chosen_option = ret["chosen_options"][0] + if chosen_option == "Approve": + self.log.info("Approved. Proceeding with downstream tasks...") + return ret + + if not self.downstream_task_ids: + self.log.info("No downstream tasks; nothing to do.") + return ret + + def get_tasks_to_skip(): + if self.ignore_downstream_trigger_rules is True: + tasks = context["task"].get_flat_relatives(upstream=False) + else: + tasks = context["task"].get_direct_relatives(upstream=False) + + yield from (t for t in tasks if not t.is_teardown) + + tasks_to_skip = get_tasks_to_skip() + + # this lets us avoid an intermediate list unless debug logging + if self.log.getEffectiveLevel() <= logging.DEBUG: + self.log.debug("Downstream task IDs %s", tasks_to_skip := list(get_tasks_to_skip())) + + self.log.info("Skipping downstream tasks") + self.skip(ti=context["ti"], tasks=tasks_to_skip) + + return ret class HITLBranchOperator(HITLOperator): diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index 9400fc0b6308c..13ef51460bc50 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -23,20 +23,27 @@ if not AIRFLOW_V_3_1_PLUS: pytest.skip("Human in the loop public API compatible with Airflow >= 3.0.1", 
allow_module_level=True) +from typing import TYPE_CHECKING, Any + from sqlalchemy import select from airflow.exceptions import DownstreamTasksSkipped from airflow.models import Trigger from airflow.models.hitl import HITLDetail +from airflow.providers.standard.operators.empty import EmptyOperator from airflow.providers.standard.operators.hitl import ( ApprovalOperator, HITLEntryOperator, HITLOperator, - HITLTerminationOperator, ) from airflow.sdk import Param from airflow.sdk.definitions.param import ParamsDict +if TYPE_CHECKING: + from sqlalchemy.orm import Session + + from tests_common.pytest_plugin import DagMaker + pytestmark = pytest.mark.db_test @@ -77,7 +84,7 @@ def test_validate_defaults_with_invalid_defaults(self, extra_kwargs) -> None: **extra_kwargs, ) - def test_execute(self, dag_maker, session) -> None: + def test_execute(self, dag_maker: DagMaker, session: Session) -> None: with dag_maker("test_dag"): task = HITLOperator( task_id="hitl_test", @@ -125,7 +132,7 @@ def test_execute(self, dag_maker, session) -> None: (None, {}), ], ) - def test_serialzed_params(self, input_params, expected_params) -> None: + def test_serialzed_params(self, input_params, expected_params: dict[str, Any]) -> None: hitl_op = HITLOperator( task_id="hitl_test", subject="This is subject", @@ -190,7 +197,7 @@ def test_validate_params_input_with_invalid_input(self) -> None: class TestApprovalOperator: - def test_init(self): + def test_init_with_options(self) -> None: with pytest.raises(ValueError): ApprovalOperator( task_id="hitl_test", @@ -200,21 +207,52 @@ def test_init(self): params={"input": 1}, ) - -class TestHITLTerminationOperator: - def test_init(self): + def test_init_with_multiple_set_to_true(self) -> None: with pytest.raises(ValueError): - HITLTerminationOperator( + ApprovalOperator( task_id="hitl_test", subject="This is subject", - body="This is body", - options=["1", "2", "3", "4", "5"], params={"input": 1}, + multiple=True, + ) + + def test_execute_complete(self) -> 
None: + hitl_op = ApprovalOperator( + task_id="hitl_test", + subject="This is subject", + ) + + ret = hitl_op.execute_complete( + context={}, + event={"chosen_options": ["Approve"], "params_input": {}}, + ) + + assert ret == { + "chosen_options": ["Approve"], + "params_input": {}, + } + + def test_execute_complete_with_downstream_tasks(self, dag_maker) -> None: + with dag_maker("hitl_test_dag", serialized=True): + hitl_op = ApprovalOperator( + task_id="hitl_test", + subject="This is subject", + ) + (hitl_op >> EmptyOperator(task_id="op1")) + + dr = dag_maker.create_dagrun() + ti = dr.get_task_instance("hitl_test") + + with pytest.raises(DownstreamTasksSkipped) as exc_info: + hitl_op.execute_complete( + context={"ti": ti, "task": ti.task}, + event={"chosen_options": ["Reject"], "params_input": {}}, ) + assert set(exc_info.value.tasks) == {"op1"} class TestHITLEntryOperator: - def test_init(self): + def test_init(self) -> None: op = HITLEntryOperator( task_id="hitl_test", subject="This is subject", From 43ab9823755b99bee9560bdcc149f236f3a12540 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Thu, 10 Jul 2025 20:08:47 +0800 Subject: [PATCH 30/30] feat(hitl): allow timeout without default --- .../airflow/providers/standard/exceptions.py | 4 ++ .../providers/standard/operators/hitl.py | 18 +++++---- .../providers/standard/triggers/hitl.py | 7 ++-- .../unit/standard/operators/test_hitl.py | 19 ++++++---- .../tests/unit/standard/triggers/test_hitl.py | 37 ++++++++++++++++++- 5 files changed, 66 insertions(+), 19 deletions(-) diff --git a/providers/standard/src/airflow/providers/standard/exceptions.py b/providers/standard/src/airflow/providers/standard/exceptions.py index 718c7aded65dc..6975e0afadfb3 100644 --- a/providers/standard/src/airflow/providers/standard/exceptions.py +++ b/providers/standard/src/airflow/providers/standard/exceptions.py @@ -59,3 +59,7 @@ class DuplicateStateError(AirflowExternalTaskSensorException): class HITLTriggerEventError(AirflowException): 
"""Raised when TriggerEvent contains error.""" + + +class HITLTimeoutError(HITLTriggerEventError): + """Raised when HILTOperator timeouts.""" diff --git a/providers/standard/src/airflow/providers/standard/operators/hitl.py b/providers/standard/src/airflow/providers/standard/operators/hitl.py index 175687a554ab5..6a1f88ddb4435 100644 --- a/providers/standard/src/airflow/providers/standard/operators/hitl.py +++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py @@ -30,7 +30,7 @@ from typing import TYPE_CHECKING, Any from airflow.models.baseoperator import BaseOperator -from airflow.providers.standard.exceptions import HITLTriggerEventError +from airflow.providers.standard.exceptions import HITLTimeoutError, HITLTriggerEventError from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload from airflow.providers.standard.utils.skipmixin import SkipMixin from airflow.sdk.definitions.param import ParamsDict @@ -84,13 +84,9 @@ def validate_defaults(self) -> None: """ Validate whether the given defaults pass the following criteria. - 1. When timeout is set, default options should be provided. - 2. Default options should be the subset of options. - 3. When multiple is False, there should only be one option. + 1. Default options should be the subset of options. + 2. When multiple is False, there should only be one option. 
""" - if self.defaults is None and self.execution_timeout: - raise ValueError('"defaults" is required when "execution_timeout" is provided.') - if self.defaults is not None: if not set(self.defaults).issubset(self.options): raise ValueError(f'defaults "{self.defaults}" should be a subset of options "{self.options}"') @@ -135,7 +131,7 @@ def serialzed_params(self) -> dict[str, Any]: def execute_complete(self, context: Context, event: dict[str, Any]) -> Any: if "error" in event: - raise HITLTriggerEventError(event["error"]) + self.process_trigger_event_error(event) chosen_options = event["chosen_options"] params_input = event["params_input"] or {} @@ -146,6 +142,12 @@ def execute_complete(self, context: Context, event: dict[str, Any]) -> Any: params_input=params_input, ) + def process_trigger_event_error(self, event: dict[str, Any]) -> None: + if "error_type" == "timeout": + raise HITLTimeoutError(event) + + raise HITLTriggerEventError(event) + def validate_chosen_options(self, chosen_options: list[str]) -> None: """Check whether user provide valid response.""" if diff := set(chosen_options) - set(self.options): diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py b/providers/standard/src/airflow/providers/standard/triggers/hitl.py index 0e9690346c5c7..63cea15363717 100644 --- a/providers/standard/src/airflow/providers/standard/triggers/hitl.py +++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py @@ -25,7 +25,7 @@ import asyncio from collections.abc import AsyncIterator from datetime import datetime -from typing import Any, TypedDict +from typing import Any, Literal, TypedDict from uuid import UUID from asgiref.sync import sync_to_async @@ -49,6 +49,7 @@ class HITLTriggerEventFailurePayload(TypedDict): """Minimum required keys for a failed Human-in-the-loop TriggerEvent.""" error: str + error_type: Literal["timeout", "unknown"] class HITLTrigger(BaseTrigger): @@ -96,11 +97,11 @@ async def run(self) -> 
AsyncIterator[TriggerEvent]: """Loop until the Human-in-the-loop response received or timeout reached.""" while True: if self.timeout_datetime and self.timeout_datetime < timezone.utcnow(): - # This normally should be checked in the HITLOperator if self.defaults is None: yield TriggerEvent( HITLTriggerEventFailurePayload( - error='defaults" is required when "execution_timeout" is provided.' + error="The timeout has passed, and the response has not yet been received.", + error_type="timeout", ) ) return diff --git a/providers/standard/tests/unit/standard/operators/test_hitl.py b/providers/standard/tests/unit/standard/operators/test_hitl.py index 13ef51460bc50..767c379bdc0e5 100644 --- a/providers/standard/tests/unit/standard/operators/test_hitl.py +++ b/providers/standard/tests/unit/standard/operators/test_hitl.py @@ -61,20 +61,25 @@ def test_validate_defaults(self) -> None: hitl_op.validate_defaults() @pytest.mark.parametrize( - "extra_kwargs", + "extra_kwargs, expected_error_msg", [ - {"defaults": None, "execution_timeout": 10}, - {"defaults": ["0"]}, - {"multiple": False, "defaults": ["1", "2"]}, + ({"defaults": ["0"]}, r'defaults ".*" should be a subset of options ".*"'), + ( + {"multiple": False, "defaults": ["1", "2"]}, + 'More than one defaults given when "multiple" is set to False.', + ), ], ids=[ - "timeout with no defaults", "defaults not in option", "multiple defaults when multiple is False", ], ) - def test_validate_defaults_with_invalid_defaults(self, extra_kwargs) -> None: - with pytest.raises(ValueError): + def test_validate_defaults_with_invalid_defaults( + self, + extra_kwargs: dict[str, Any], + expected_error_msg: str, + ) -> None: + with pytest.raises(ValueError, match=expected_error_msg): HITLOperator( task_id="hitl_test", subject="This is subject", diff --git a/providers/standard/tests/unit/standard/triggers/test_hitl.py b/providers/standard/tests/unit/standard/triggers/test_hitl.py index abfb641754c9e..ac96d9eed1e07 100644 --- 
a/providers/standard/tests/unit/standard/triggers/test_hitl.py +++ b/providers/standard/tests/unit/standard/triggers/test_hitl.py @@ -31,7 +31,11 @@ from uuid6 import uuid7 from airflow.api_fastapi.execution_api.datamodels.hitl import HITLDetailResponse -from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload +from airflow.providers.standard.triggers.hitl import ( + HITLTrigger, + HITLTriggerEventFailurePayload, + HITLTriggerEventSuccessPayload, +) from airflow.triggers.base import TriggerEvent from airflow.utils.timezone import utcnow @@ -61,6 +65,37 @@ def test_serialization(self): "poke_interval": 50.0, } + @pytest.mark.db_test + @pytest.mark.asyncio + @mock.patch("airflow.sdk.execution_time.hitl.update_htil_detail_response") + async def test_run_failed_due_to_timeout(self, mock_update, mock_supervisor_comms): + trigger = HITLTrigger( + ti_id=TI_ID, + options=["1", "2", "3", "4", "5"], + params={"input": 1}, + multiple=False, + timeout_datetime=utcnow() + timedelta(seconds=0.1), + poke_interval=5, + ) + mock_supervisor_comms.send.return_value = HITLDetailResponse( + response_received=False, + user_id=None, + response_at=None, + chosen_options=None, + params_input={}, + ) + + gen = trigger.run() + trigger_task = asyncio.create_task(gen.__anext__()) + await asyncio.sleep(0.3) + event = await trigger_task + assert event == TriggerEvent( + HITLTriggerEventFailurePayload( + error="The timeout has passed, and the response has not yet been received.", + error_type="timeout", + ) + ) + @pytest.mark.db_test @pytest.mark.asyncio @mock.patch("airflow.sdk.execution_time.hitl.update_htil_detail_response")