2 changes: 1 addition & 1 deletion airflow/api_connexion/endpoints/task_endpoint.py
@@ -61,7 +61,7 @@ def get_tasks(*, dag_id: str, order_by: str = "task_id") -> APIResponse:
     tasks = dag.tasks

     try:
-        tasks = sorted(tasks, key=attrgetter(order_by.lstrip("-")), reverse=(order_by[0:1] == "-"))
+        tasks.sort(key=attrgetter(order_by.lstrip("-")), reverse=(order_by[0:1] == "-"))
     except AttributeError as err:
         raise BadRequest(detail=str(err))
     task_collection = TaskCollection(tasks=tasks, total_entries=len(tasks))
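The pattern repeated across this PR: `sorted()` allocates and returns a new list, while `list.sort()` reorders the existing list in place and returns `None`, so the rebinding step can be dropped. A minimal standalone sketch of the difference (the `Task` class here is invented for illustration, not Airflow's):

```python
from operator import attrgetter


class Task:
    """Stand-in for an object with a sortable attribute."""

    def __init__(self, task_id: str) -> None:
        self.task_id = task_id


tasks = [Task("c"), Task("a"), Task("b")]

# sorted() builds a new list and leaves the original untouched.
by_id = sorted(tasks, key=attrgetter("task_id"))

# list.sort() reorders the same list object in place; it returns None,
# so there is nothing to assign back.
tasks.sort(key=attrgetter("task_id"))

print([t.task_id for t in tasks])  # ['a', 'b', 'c']
```

The endpoint's `reverse=(order_by[0:1] == "-")` argument works the same with either call, since both accept the same `key` and `reverse` keywords.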
4 changes: 2 additions & 2 deletions airflow/dag_processing/manager.py
@@ -865,7 +865,7 @@ def _log_file_processing_stats(self, known_file_paths):
             rows.append((file_path, processor_pid, runtime, num_dags, num_errors, last_runtime, last_run))

         # Sort by longest last runtime. (Can't sort None values in python3)
-        rows = sorted(rows, key=lambda x: x[3] or 0.0)
+        rows.sort(key=lambda x: x[3] or 0.0)

         formatted_rows = []
         for file_path, pid, runtime, num_dags, num_errors, last_runtime, last_run in rows:
@@ -1167,7 +1167,7 @@ def prepare_file_path_queue(self):
         if is_mtime_mode:
             file_paths = sorted(files_with_mtime, key=files_with_mtime.get, reverse=True)
         elif list_mode == "alphabetical":
-            file_paths = sorted(file_paths)
+            file_paths.sort()
         elif list_mode == "random_seeded_by_host":
             # Shuffle the list seeded by hostname so multiple schedulers can work on different
             # set of files. Since we set the seed, the sort order will remain same per host
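A side note on the key function kept by the first hunk: comparing `None` against a number raises `TypeError` in Python 3, which is what the "(Can't sort None values in python3)" comment refers to; `x[3] or 0.0` substitutes `0.0` so every row yields a comparable key. A small standalone illustration (the tuples are made up, not real processor stats):

```python
rows = [("dag_a.py", 3.2), ("dag_b.py", None), ("dag_c.py", 1.5)]

# rows.sort(key=lambda x: x[1]) would raise:
#   TypeError: '<' not supported between instances of 'NoneType' and 'float'
rows.sort(key=lambda x: x[1] or 0.0)  # None is treated as 0.0 and sorts first

print(rows)  # [('dag_b.py', None), ('dag_c.py', 1.5), ('dag_a.py', 3.2)]
```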
2 changes: 1 addition & 1 deletion airflow/timetables/events.py
@@ -52,7 +52,7 @@ def __init__(
         self.event_dates = list(event_dates)  # Must be reversible and indexable
         if not presorted:
             # For long lists this could take a while, so only want to do it once
-            self.event_dates = sorted(self.event_dates)
+            self.event_dates.sort()
         self.restrict_to_events = restrict_to_events
         if description is None:
             self.description = (
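For context, the `presorted` flag shown above lets callers that already hold an ordered list skip the timetable's own sort pass. A hedged usage sketch, assuming the constructor keywords visible in this hunk (`event_dates`, `presorted`) and the module path implied by the file name:

```python
import pendulum

from airflow.timetables.events import EventsTimetable

# Invented example dates, already in ascending order.
event_dates = [
    pendulum.datetime(2023, 1, 1, tz="UTC"),
    pendulum.datetime(2023, 2, 1, tz="UTC"),
    pendulum.datetime(2023, 3, 1, tz="UTC"),
]

# presorted=True tells the timetable it can skip sorting the list itself.
timetable = EventsTimetable(event_dates=event_dates, presorted=True)
```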
5 changes: 2 additions & 3 deletions dev/validate_version_added_fields_in_config.py
@@ -114,9 +114,8 @@ def read_local_config_options() -> set[tuple[str, str, str]]:
     computed_option_new_section.update(options)

 # 1. Prepare versions to checks
-airflow_version = fetch_pypi_versions()
-airflow_version = sorted(airflow_version, key=semver.VersionInfo.parse)
-to_check_versions: list[str] = [d for d in airflow_version if d.startswith("2.")]
+to_check_versions: list[str] = [d for d in fetch_pypi_versions() if d.startswith("2.")]
+to_check_versions.sort(key=semver.VersionInfo.parse)

 # 2. Compute expected options set with version added fields
 expected_computed_options: set[tuple[str, str, str]] = set()
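The rewrite in this file first filters the PyPI versions down to the 2.x series and only then sorts the shorter list in place, using `semver.VersionInfo.parse` as the key so that versions compare numerically per component rather than as strings. A standalone sketch with an invented version list:

```python
import semver

versions = ["2.9.0", "2.10.0", "2.1.4", "1.10.15"]

# Keep only 2.x releases, then sort in place by parsed semantic version.
to_check = [v for v in versions if v.startswith("2.")]
to_check.sort(key=semver.VersionInfo.parse)

# A plain string sort would put "2.10.0" before "2.9.0"; the semver key does not.
print(to_check)  # ['2.1.4', '2.9.0', '2.10.0']
```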
2 changes: 1 addition & 1 deletion scripts/ci/pre_commit/pre_commit_sort_in_the_wild.py
@@ -68,5 +68,5 @@ def stable_sort(x):
         line = "1." + line.split(".", maxsplit=1)[1]
         print(f"{old_line.strip()} => {line.strip()}")
     companies.append(line)
-companies = sorted(companies, key=stable_sort)
+companies.sort(key=stable_sort)
 inthewild_path.write_text("".join(header) + "\n" + "".join(companies))
2 changes: 1 addition & 1 deletion scripts/tools/generate-integrations-json.py
@@ -67,7 +67,7 @@
         result["logo"] = logo
     result_integrations.append(result)

-result_integrations = sorted(result_integrations, key=lambda x: x["name"].lower())
+result_integrations.sort(key=lambda x: x["name"].lower())
 with open(os.path.join(AIRFLOW_SITE_DIR, "landing-pages/site/static/integrations.json"), "w") as f:
     f.write(
         json.dumps(
2 changes: 1 addition & 1 deletion tests/jobs/test_scheduler_job.py
@@ -601,7 +601,7 @@ def test_find_executable_task_instances_pool(self, dag_maker):
             dr2.get_task_instance(task_id_1, session=session),
             dr2.get_task_instance(task_id_2, session=session),
         ]
-        tis = sorted(tis, key=lambda ti: ti.key)
+        tis.sort(key=lambda ti: ti.key)
         for ti in tis:
            ti.state = State.SCHEDULED
            session.merge(ti)