From 486a72e932fa0cddf852cd8a38a2e7ca4f79dea5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miroslav=20=C5=A0ediv=C3=BD?= <6774676+eumiro@users.noreply.github.com> Date: Mon, 21 Aug 2023 20:32:45 +0200 Subject: [PATCH] Replace strftime with f-strings where nicer --- airflow/models/taskinstance.py | 2 +- airflow/providers/amazon/aws/hooks/sagemaker.py | 6 ++---- airflow/providers/elasticsearch/log/es_json_formatter.py | 6 +----- airflow/providers/google/cloud/operators/dataproc.py | 3 +-- airflow/providers/oracle/hooks/oracle.py | 4 +--- airflow/task/task_runner/cgroup_task_runner.py | 2 +- airflow/utils/log/timezone_aware.py | 6 +----- .../airflow_breeze/commands/release_management_commands.py | 2 +- tests/dags_corrupted/test_impersonation_custom.py | 2 +- tests/providers/amazon/aws/utils/eks_test_utils.py | 2 +- tests/providers/apache/kylin/operators/test_kylin_cube.py | 4 ++-- tests/providers/http/sensors/test_http.py | 4 +--- 12 files changed, 14 insertions(+), 29 deletions(-) diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 975e615d95414..0be2f005bf5fd 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -2139,7 +2139,7 @@ def get_prev_ds() -> str | None: execution_date = get_prev_execution_date() if execution_date is None: return None - return execution_date.strftime(r"%Y-%m-%d") + return execution_date.strftime("%Y-%m-%d") def get_prev_ds_nodash() -> str | None: prev_ds = get_prev_ds() diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py index b8247d9de97bb..364e073b1a8d3 100644 --- a/airflow/providers/amazon/aws/hooks/sagemaker.py +++ b/airflow/providers/amazon/aws/hooks/sagemaker.py @@ -128,10 +128,8 @@ def secondary_training_status_message( status_strs = [] for transition in transitions_to_print: message = transition["StatusMessage"] - time_str = timezone.convert_to_utc(cast(datetime, job_description["LastModifiedTime"])).strftime( - 
"%Y-%m-%d %H:%M:%S" - ) - status_strs.append(f"{time_str} {transition['Status']} - {message}") + time_utc = timezone.convert_to_utc(cast(datetime, job_description["LastModifiedTime"])) + status_strs.append(f"{time_utc:%Y-%m-%d %H:%M:%S} {transition['Status']} - {message}") return "\n".join(status_strs) diff --git a/airflow/providers/elasticsearch/log/es_json_formatter.py b/airflow/providers/elasticsearch/log/es_json_formatter.py index 7ac543c0a3a0d..cf77896a9218e 100644 --- a/airflow/providers/elasticsearch/log/es_json_formatter.py +++ b/airflow/providers/elasticsearch/log/es_json_formatter.py @@ -31,11 +31,7 @@ class ElasticsearchJSONFormatter(JSONFormatter): def formatTime(self, record, datefmt=None): """Return the creation time of the LogRecord in ISO 8601 date/time format in the local time zone.""" dt = pendulum.from_timestamp(record.created, tz=pendulum.local_timezone()) - if datefmt: - s = dt.strftime(datefmt) - else: - s = dt.strftime(self.default_time_format) - + s = dt.strftime(datefmt or self.default_time_format) if self.default_msec_format: s = self.default_msec_format % (s, record.msecs) if self.default_tz_format: diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py index 09e76ae2069fe..eaa65ba7c15db 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -1578,8 +1578,7 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator): @staticmethod def _generate_temp_filename(filename): - date = time.strftime("%Y%m%d%H%M%S") - return f"{date}_{str(uuid.uuid4())[:8]}_{ntpath.basename(filename)}" + return f"{time.strftime('%Y%m%d%H%M%S')}_{str(uuid.uuid4())[:8]}_{ntpath.basename(filename)}" def _upload_file_temp(self, bucket, local_file): """Upload a local file to a Google Cloud Storage bucket.""" diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py index 225a8ca6ccd60..f207df72d9af5 
100644 --- a/airflow/providers/oracle/hooks/oracle.py +++ b/airflow/providers/oracle/hooks/oracle.py @@ -302,9 +302,7 @@ def insert_rows( elif numpy and isinstance(cell, numpy.datetime64): lst.append("'" + str(cell) + "'") elif isinstance(cell, datetime): - lst.append( - "to_date('" + cell.strftime("%Y-%m-%d %H:%M:%S") + "','YYYY-MM-DD HH24:MI:SS')" - ) + lst.append(f"to_date('{cell:%Y-%m-%d %H:%M:%S}','YYYY-MM-DD HH24:MI:SS')") else: lst.append(str(cell)) values = tuple(lst) diff --git a/airflow/task/task_runner/cgroup_task_runner.py b/airflow/task/task_runner/cgroup_task_runner.py index 2ab011471377a..14354453bc62a 100644 --- a/airflow/task/task_runner/cgroup_task_runner.py +++ b/airflow/task/task_runner/cgroup_task_runner.py @@ -134,7 +134,7 @@ def start(self): return # Create a unique cgroup name - cgroup_name = f"airflow/{datetime.datetime.utcnow().strftime('%Y-%m-%d')}/{str(uuid.uuid4())}" + cgroup_name = f"airflow/{datetime.datetime.utcnow():%Y-%m-%d}/{uuid.uuid4()}" self.mem_cgroup_name = f"memory/{cgroup_name}" self.cpu_cgroup_name = f"cpu/{cgroup_name}" diff --git a/airflow/utils/log/timezone_aware.py b/airflow/utils/log/timezone_aware.py index 999ccda5a722a..ae96a11116a1f 100644 --- a/airflow/utils/log/timezone_aware.py +++ b/airflow/utils/log/timezone_aware.py @@ -40,11 +40,7 @@ def formatTime(self, record, datefmt=None): date and time format in the local time zone. 
""" dt = pendulum.from_timestamp(record.created, tz=pendulum.local_timezone()) - if datefmt: - s = dt.strftime(datefmt) - else: - s = dt.strftime(self.default_time_format) - + s = dt.strftime(datefmt or self.default_time_format) if self.default_msec_format: s = self.default_msec_format % (s, record.msecs) if self.default_tz_format: diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index dc01f1aff5606..aa4f7c3a8573f 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -1203,7 +1203,7 @@ class ProviderPRInfo(NamedTuple): get_console().print() get_console().print( "Issue title: [yellow]Status of testing Providers that were " - f"prepared on {datetime.now().strftime('%B %d, %Y')}[/]" + f"prepared on {datetime.now():%B %d, %Y}[/]" ) get_console().print() syntax = Syntax(issue_content, "markdown", theme="ansi_dark") diff --git a/tests/dags_corrupted/test_impersonation_custom.py b/tests/dags_corrupted/test_impersonation_custom.py index 7fc9980d6cc63..d52342c65868f 100644 --- a/tests/dags_corrupted/test_impersonation_custom.py +++ b/tests/dags_corrupted/test_impersonation_custom.py @@ -40,7 +40,7 @@ def print_today(): date_time = FakeDatetime.utcnow() - print(f"Today is {date_time.strftime('%Y-%m-%d')}") + print(f"Today is {date_time:%Y-%m-%d}") def check_hive_conf(): diff --git a/tests/providers/amazon/aws/utils/eks_test_utils.py b/tests/providers/amazon/aws/utils/eks_test_utils.py index ffab5d7fbfc38..d3f0c715ce09c 100644 --- a/tests/providers/amazon/aws/utils/eks_test_utils.py +++ b/tests/providers/amazon/aws/utils/eks_test_utils.py @@ -235,7 +235,7 @@ def convert_keys(original: dict) -> dict: def iso_date(input_datetime: datetime.datetime) -> str: - return input_datetime.strftime("%Y-%m-%dT%H:%M:%S") + "Z" + return f"{input_datetime:%Y-%m-%dT%H:%M:%S}Z" def 
generate_dict(prefix, count) -> dict: diff --git a/tests/providers/apache/kylin/operators/test_kylin_cube.py b/tests/providers/apache/kylin/operators/test_kylin_cube.py index baa61ad80fc4a..58c1832a0cdd6 100644 --- a/tests/providers/apache/kylin/operators/test_kylin_cube.py +++ b/tests/providers/apache/kylin/operators/test_kylin_cube.py @@ -37,8 +37,8 @@ class TestKylinCubeOperator: "project": "learn_kylin", "cube": "kylin_sales_cube", "command": "build", - "start_time": datetime(2012, 1, 2, 0, 0).strftime("%s") + "000", - "end_time": datetime(2012, 1, 3, 0, 0).strftime("%s") + "000", + "start_time": str(int(datetime(2012, 1, 2, 0, 0).timestamp() * 1000)), + "end_time": str(int(datetime(2012, 1, 3, 0, 0).timestamp() * 1000)), } cube_command = [ "fullbuild", diff --git a/tests/providers/http/sensors/test_http.py b/tests/providers/http/sensors/test_http.py index da070d91c2a93..b2fe5921a0a84 100644 --- a/tests/providers/http/sensors/test_http.py +++ b/tests/providers/http/sensors/test_http.py @@ -244,9 +244,7 @@ def test_sensor(self): endpoint="/search", request_params={"client": "ubuntu", "q": "airflow", "date": "{{ds}}"}, headers={}, - response_check=lambda response: ( - "apache/airflow/" + DEFAULT_DATE.strftime("%Y-%m-%d") in response.text - ), + response_check=lambda response: f"apache/airflow/{DEFAULT_DATE:%Y-%m-%d}" in response.text, poke_interval=5, timeout=15, dag=self.dag,