From 4835556e58038ed7e4efaebe368932be01bb2aea Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sat, 22 Apr 2023 12:41:14 +0200
Subject: [PATCH] Upgrade ruff to 0.0.262

---
 .pre-commit-config.yaml | 2 +-
 airflow/kubernetes/pod.py | 2 +-
 .../amazon/aws/transfers/dynamodb_to_s3.py | 2 +-
 .../amazon/aws/triggers/redshift_cluster.py | 2 +-
 airflow/providers/apache/kafka/hooks/base.py | 2 +-
 airflow/providers/apache/kafka/hooks/client.py | 2 +-
 airflow/providers/apache/livy/triggers/livy.py | 2 +-
 airflow/providers/cncf/kubernetes/triggers/pod.py | 2 +-
 airflow/providers/dbt/cloud/operators/dbt.py | 2 +-
 airflow/providers/dbt/cloud/triggers/dbt.py | 2 +-
 airflow/providers/google/cloud/hooks/bigtable.py | 2 +-
 .../providers/google/cloud/triggers/bigquery.py | 14 +++++++-------
 .../providers/google/cloud/triggers/cloud_build.py | 2 +-
 .../providers/google/cloud/triggers/datafusion.py | 2 +-
 .../providers/google/cloud/triggers/dataproc.py | 6 +++---
 airflow/providers/google/cloud/triggers/gcs.py | 6 +++---
 .../google/cloud/triggers/kubernetes_engine.py | 2 +-
 .../providers/google/cloud/triggers/mlengine.py | 2 +-
 airflow/providers/http/hooks/http.py | 2 +-
 .../microsoft/azure/triggers/data_factory.py | 4 ++--
 airflow/providers/microsoft/azure/triggers/wasb.py | 4 ++--
 airflow/stats.py | 2 +-
 airflow/triggers/base.py | 2 +-
 airflow/triggers/external_task.py | 4 ++--
 airflow/triggers/file.py | 2 +-
 docs/exts/exampleinclude.py | 2 +-
 scripts/ci/pre_commit/pre_commit_insert_extras.py | 4 ++--
 .../ci/pre_commit/pre_commit_local_yml_mounts.py | 6 +++---
 tests/cli/conftest.py | 2 +-
 .../google/cloud/bigtable/example_bigtable.py | 2 +-
 30 files changed, 46 insertions(+), 46 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dbc8229360770..c8000c6abbd2d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -179,7 +179,7 @@ repos:
         # Since ruff makes use of multiple cores we _purposefully_ don't run this in docker so it can use the
         # host CPU to it's fullest
         entry: ruff --fix --no-update-check --force-exclude
-        additional_dependencies: ['ruff==0.0.226']
+        additional_dependencies: ['ruff==0.0.262']
         files: \.pyi?$
         exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py
   - repo: https://github.com/asottile/blacken-docs
diff --git a/airflow/kubernetes/pod.py b/airflow/kubernetes/pod.py
index 5b946b2e3a885..629cbad17c782 100644
--- a/airflow/kubernetes/pod.py
+++ b/airflow/kubernetes/pod.py
@@ -29,7 +29,7 @@

 with warnings.catch_warnings():
     warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources  # noqa: autoflake
+    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources

     warnings.warn(
         "This module is deprecated. Please use `kubernetes.client.models` for `V1ResourceRequirements` and `Port`.",
diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
index 4067217d97d40..bd2034893e5e8 100644
--- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -87,7 +87,7 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
     :param dynamodb_scan_kwargs: kwargs pass to
     :param s3_key_prefix: Prefix of s3 object key
     :param process_func: How we transforms a dynamodb item to bytes. By default we dump the json
-    """  # noqa: E501
+    """

     template_fields: Sequence[str] = (
         *AwsToAwsBaseOperator.template_fields,
diff --git a/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/airflow/providers/amazon/aws/triggers/redshift_cluster.py
index a32a6efa19924..9e5cac62549a1 100644
--- a/airflow/providers/amazon/aws/triggers/redshift_cluster.py
+++ b/airflow/providers/amazon/aws/triggers/redshift_cluster.py
@@ -55,7 +55,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = RedshiftAsyncHook(aws_conn_id=self.aws_conn_id)
         while self.attempts >= 1:
             self.attempts = self.attempts - 1
diff --git a/airflow/providers/apache/kafka/hooks/base.py b/airflow/providers/apache/kafka/hooks/base.py
index bd3a2d3cc74b8..eb5429a6a56b7 100644
--- a/airflow/providers/apache/kafka/hooks/base.py
+++ b/airflow/providers/apache/kafka/hooks/base.py
@@ -58,7 +58,7 @@ def _get_client(self, config):

     @cached_property
     def get_conn(self) -> Any:
-        """get the configuration object"""
+        """Get the configuration object"""
         config = self.get_connection(self.kafka_config_id).extra_dejson

         if not (config.get("bootstrap.servers", None)):
diff --git a/airflow/providers/apache/kafka/hooks/client.py b/airflow/providers/apache/kafka/hooks/client.py
index 7613bfab220c0..1043a3edb5e05 100644
--- a/airflow/providers/apache/kafka/hooks/client.py
+++ b/airflow/providers/apache/kafka/hooks/client.py
@@ -41,7 +41,7 @@ def create_topic(
         self,
         topics: Sequence[Sequence[Any]],
     ) -> None:
-        """creates a topic
+        """Creates a topic

         :param topics: a list of topics to create including the number of partitions for the topic
             and the replication factor. Format: [ ("topic_name", number of partitions, replication factor)]
diff --git a/airflow/providers/apache/livy/triggers/livy.py b/airflow/providers/apache/livy/triggers/livy.py
index cfcbde53b9f5f..17c5567679a43 100644
--- a/airflow/providers/apache/livy/triggers/livy.py
+++ b/airflow/providers/apache/livy/triggers/livy.py
@@ -78,7 +78,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """
         Checks if the _polling_interval > 0, in that case it pools Livy
         for batch termination asynchronously.
diff --git a/airflow/providers/cncf/kubernetes/triggers/pod.py b/airflow/providers/cncf/kubernetes/triggers/pod.py
index e024e7786ade7..b4a88162537fc 100644
--- a/airflow/providers/cncf/kubernetes/triggers/pod.py
+++ b/airflow/providers/cncf/kubernetes/triggers/pod.py
@@ -116,7 +116,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current pod status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         self.log.info("Checking pod %r in namespace %r.", self.pod_name, self.pod_namespace)
diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/airflow/providers/dbt/cloud/operators/dbt.py
index 754c0cbdbfce7..de110e9865b4f 100644
--- a/airflow/providers/dbt/cloud/operators/dbt.py
+++ b/airflow/providers/dbt/cloud/operators/dbt.py
@@ -173,7 +173,7 @@ def execute(self, context: Context):
             )
             return self.run_id

-    def execute_complete(self, context: "Context", event: dict[str, Any]) -> int:
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
         """
         Callback for when the trigger fires - returns immediately.
         Relies on trigger to throw an exception, otherwise it assumes execution was
diff --git a/airflow/providers/dbt/cloud/triggers/dbt.py b/airflow/providers/dbt/cloud/triggers/dbt.py
index 9bad789a5246f..311107341299b 100644
--- a/airflow/providers/dbt/cloud/triggers/dbt.py
+++ b/airflow/providers/dbt/cloud/triggers/dbt.py
@@ -64,7 +64,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Dbt, polls for the pipeline run status"""
         hook = DbtCloudHook(self.conn_id)
         try:
diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/airflow/providers/google/cloud/hooks/bigtable.py
index c51c1a990febb..999d141547555 100644
--- a/airflow/providers/google/cloud/hooks/bigtable.py
+++ b/airflow/providers/google/cloud/hooks/bigtable.py
@@ -248,7 +248,7 @@ def delete_table(self, instance_id: str, table_id: str, project_id: str) -> None
         """
         instance = self.get_instance(instance_id=instance_id, project_id=project_id)
         if instance is None:
-            raise RuntimeError("Instance %s did not exist; unable to delete table %s" % instance_id, table_id)
+            raise RuntimeError(f"Instance {instance_id} did not exist; unable to delete table {table_id}")
         table = instance.table(table_id=table_id)
         table.delete()

diff --git a/airflow/providers/google/cloud/triggers/bigquery.py b/airflow/providers/google/cloud/triggers/bigquery.py
index e8d889221981d..ba4ce8c19be42 100644
--- a/airflow/providers/google/cloud/triggers/bigquery.py
+++ b/airflow/providers/google/cloud/triggers/bigquery.py
@@ -71,7 +71,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -122,7 +122,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -181,7 +181,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent with response data"""
         hook = self._get_async_hook()
         while True:
@@ -286,7 +286,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -414,7 +414,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -487,7 +487,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:

     def _get_async_hook(self) -> BigQueryTableAsyncHook:
         return BigQueryTableAsyncHook(gcp_conn_id=self.gcp_conn_id)

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Will run until the table exists in the Google Big Query."""
         while True:
             try:
@@ -562,7 +562,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Will run until the table exists in the Google Big Query."""
         hook = BigQueryAsyncHook(gcp_conn_id=self.gcp_conn_id)
         job_id = None
diff --git a/airflow/providers/google/cloud/triggers/cloud_build.py b/airflow/providers/google/cloud/triggers/cloud_build.py
index c40588b5cae65..3187dc0c1d342 100644
--- a/airflow/providers/google/cloud/triggers/cloud_build.py
+++ b/airflow/providers/google/cloud/triggers/cloud_build.py
@@ -75,7 +75,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current build execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
diff --git a/airflow/providers/google/cloud/triggers/datafusion.py b/airflow/providers/google/cloud/triggers/datafusion.py
index 5619d1a26f416..34fa7d025819f 100644
--- a/airflow/providers/google/cloud/triggers/datafusion.py
+++ b/airflow/providers/google/cloud/triggers/datafusion.py
@@ -80,7 +80,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current pipeline status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
diff --git a/airflow/providers/google/cloud/triggers/dataproc.py b/airflow/providers/google/cloud/triggers/dataproc.py
index d3736afca35ce..c340b44efe52e 100644
--- a/airflow/providers/google/cloud/triggers/dataproc.py
+++ b/airflow/providers/google/cloud/triggers/dataproc.py
@@ -143,7 +143,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         while True:
             cluster = await self.get_async_hook().get_cluster(
                 project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
@@ -261,7 +261,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Wait until cluster is deleted completely"""
         while self.end_time > time.time():
             try:
@@ -309,7 +309,7 @@ def serialize(self):
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = self.get_async_hook()
         while True:
             try:
diff --git a/airflow/providers/google/cloud/triggers/gcs.py b/airflow/providers/google/cloud/triggers/gcs.py
index 7003df5942aff..13a8fcbde28a2 100644
--- a/airflow/providers/google/cloud/triggers/gcs.py
+++ b/airflow/providers/google/cloud/triggers/gcs.py
@@ -66,8 +66,8 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
-        """loop until the relevant file/folder is found."""
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Loop until the relevant file/folder is found."""
         try:
             hook = self._get_async_hook()
             while True:
@@ -144,7 +144,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Loop until the object updated time is greater than target datetime"""
         try:
             hook = self._get_async_hook()
diff --git a/airflow/providers/google/cloud/triggers/kubernetes_engine.py b/airflow/providers/google/cloud/triggers/kubernetes_engine.py
index cd71e0d9a32ce..237a88e35298b 100644
--- a/airflow/providers/google/cloud/triggers/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/triggers/kubernetes_engine.py
@@ -157,7 +157,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets operation status and yields corresponding event."""
         hook = self._get_hook()
         while True:
diff --git a/airflow/providers/google/cloud/triggers/mlengine.py b/airflow/providers/google/cloud/triggers/mlengine.py
index 8d42216dcd34d..c221308f2aee4 100644
--- a/airflow/providers/google/cloud/triggers/mlengine.py
+++ b/airflow/providers/google/cloud/triggers/mlengine.py
@@ -88,7 +88,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
diff --git a/airflow/providers/http/hooks/http.py b/airflow/providers/http/hooks/http.py
index ed1d6dd893624..ef09713a8eb32 100644
--- a/airflow/providers/http/hooks/http.py
+++ b/airflow/providers/http/hooks/http.py
@@ -306,7 +306,7 @@ async def run(
         data: dict[str, Any] | str | None = None,
         headers: dict[str, Any] | None = None,
         extra_options: dict[str, Any] | None = None,
-    ) -> "ClientResponse":
+    ) -> ClientResponse:
         r"""
         Performs an asynchronous HTTP request call

diff --git a/airflow/providers/microsoft/azure/triggers/data_factory.py b/airflow/providers/microsoft/azure/triggers/data_factory.py
index 04f5638683a4f..c4c02ee9ffe61 100644
--- a/airflow/providers/microsoft/azure/triggers/data_factory.py
+++ b/airflow/providers/microsoft/azure/triggers/data_factory.py
@@ -67,7 +67,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Azure Data Factory, polls for the pipeline run status"""
         hook = AzureDataFactoryAsyncHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
         try:
@@ -140,7 +140,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Azure Data Factory, polls for the pipeline run status"""
         hook = AzureDataFactoryAsyncHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
         try:
diff --git a/airflow/providers/microsoft/azure/triggers/wasb.py b/airflow/providers/microsoft/azure/triggers/wasb.py
index 6630534c58d17..2b81df2bbcf20 100644
--- a/airflow/providers/microsoft/azure/triggers/wasb.py
+++ b/airflow/providers/microsoft/azure/triggers/wasb.py
@@ -63,7 +63,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Makes async connection to Azure WASB and polls for existence of the given blob name."""
         blob_exists = False
         hook = WasbAsyncHook(wasb_conn_id=self.wasb_conn_id, public_read=self.public_read)
@@ -138,7 +138,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Makes async connection to Azure WASB and polls for existence of a blob with given prefix."""
         prefix_exists = False
         hook = WasbAsyncHook(wasb_conn_id=self.wasb_conn_id, public_read=self.public_read)
diff --git a/airflow/stats.py b/airflow/stats.py
index da31e7d913eb9..d8eb1d0f644f8 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -301,7 +301,7 @@ def wrapper(
         if stat is not None and tags is not None:
             for k, v in tags.items():
                 if self.metric_tags_validator.test(k):
-                    if all((c not in [",", "="] for c in v + k)):
+                    if all(c not in [",", "="] for c in v + k):
                         stat += f",{k}={v}"
                     else:
                         log.error("Dropping invalid tag: %s=%s.", k, v)
diff --git a/airflow/triggers/base.py b/airflow/triggers/base.py
index fa968ebe9e5a1..314d97b0ee919 100644
--- a/airflow/triggers/base.py
+++ b/airflow/triggers/base.py
@@ -60,7 +60,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
         raise NotImplementedError("Triggers must implement serialize()")

     @abc.abstractmethod
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """
         Runs the trigger in an asynchronous context.

diff --git a/airflow/triggers/external_task.py b/airflow/triggers/external_task.py
index 883753401c58f..6099dc0a37014 100644
--- a/airflow/triggers/external_task.py
+++ b/airflow/triggers/external_task.py
@@ -72,7 +72,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
         Checks periodically in the database to see if the task exists, and has
         hit one of the states yet, or not.
@@ -136,7 +136,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
         Checks periodically in the database to see if the dag run exists, and has
         hit one of the states yet, or not.
diff --git a/airflow/triggers/file.py b/airflow/triggers/file.py
index e85658249fad6..4128b09814b4e 100644
--- a/airflow/triggers/file.py
+++ b/airflow/triggers/file.py
@@ -58,7 +58,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Loop until the relevant files are found."""
         while True:
             for path in glob(self.filepath, recursive=self.recursive):
diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py
index 30ca52057dd41..7a4498d4e7a1a 100644
--- a/docs/exts/exampleinclude.py
+++ b/docs/exts/exampleinclude.py
@@ -36,7 +36,7 @@
 from sphinx.util.nodes import set_source_info

 try:
-    import sphinx_airflow_theme  # noqa: autoflake
+    import sphinx_airflow_theme

     airflow_theme_is_available = True
 except ImportError:
diff --git a/scripts/ci/pre_commit/pre_commit_insert_extras.py b/scripts/ci/pre_commit/pre_commit_insert_extras.py
index 3e08bd674d71b..fac926f611caa 100755
--- a/scripts/ci/pre_commit/pre_commit_insert_extras.py
+++ b/scripts/ci/pre_commit/pre_commit_insert_extras.py
@@ -27,8 +27,8 @@
 sys.path.insert(0, str(AIRFLOW_SOURCES_DIR))  # make sure setup is imported from Airflow
 # flake8: noqa: F401

-from common_precommit_utils import insert_documentation  # isort: skip # noqa E402
-from setup import EXTRAS_DEPENDENCIES  # isort:skip # noqa
+from common_precommit_utils import insert_documentation  # isort: skip
+from setup import EXTRAS_DEPENDENCIES  # isort:skip

 sys.path.append(str(AIRFLOW_SOURCES_DIR))

diff --git a/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py b/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py
index 6efba5a6aa088..e39fe50e0e367 100755
--- a/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py
+++ b/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py
@@ -22,16 +22,16 @@
 sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure common_precommit_utils is imported

-from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH  # isort: skip # noqa E402
+from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH  # isort: skip

 sys.path.insert(0, str(AIRFLOW_SOURCES_ROOT_PATH))  # make sure setup is imported from Airflow
 sys.path.insert(
     0, str(AIRFLOW_SOURCES_ROOT_PATH / "dev" / "breeze" / "src")
 )  # make sure setup is imported from Airflow
 # flake8: noqa: F401

-from airflow_breeze.utils.docker_command_utils import VOLUMES_FOR_SELECTED_MOUNTS  # isort: skip # noqa E402
+from airflow_breeze.utils.docker_command_utils import VOLUMES_FOR_SELECTED_MOUNTS  # isort: skip

-from common_precommit_utils import insert_documentation  # isort: skip # noqa E402
+from common_precommit_utils import insert_documentation  # isort: skip

 sys.path.append(str(AIRFLOW_SOURCES_ROOT_PATH))

diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py
index 9a9f7c0415c35..0a0e2ec5cb99a 100644
--- a/tests/cli/conftest.py
+++ b/tests/cli/conftest.py
@@ -27,7 +27,7 @@
 from tests.test_utils.config import conf_vars

 # Create custom executors here because conftest is imported first
-custom_executor_module = type(sys)("custom_executor")  # noqa
+custom_executor_module = type(sys)("custom_executor")
 custom_executor_module.CustomCeleryExecutor = type(  # type: ignore
     "CustomCeleryExecutor", (celery_executor.CeleryExecutor,), {}
 )
diff --git a/tests/system/providers/google/cloud/bigtable/example_bigtable.py b/tests/system/providers/google/cloud/bigtable/example_bigtable.py
index b105634118254..4f06c9c2fa724 100644
--- a/tests/system/providers/google/cloud/bigtable/example_bigtable.py
+++ b/tests/system/providers/google/cloud/bigtable/example_bigtable.py
@@ -40,7 +40,7 @@
 See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster
 * CBT_TABLE_ID - desired ID of the Table
 * CBT_POKE_INTERVAL - number of seconds between every attempt of Sensor check
-"""  # noqa: E501
+"""
 from __future__ import annotations

 import os
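
Most hunks above make the same mechanical change: the quotes around forward-referenced annotations such as AsyncIterator["TriggerEvent"] are dropped. That is safe because these modules already use `from __future__ import annotations` (PEP 563), visible in the example_bigtable.py context, so annotations are never evaluated at runtime and the quotes are redundant; the upgraded ruff appears to flag the quoted form via its pyupgrade-style "quoted annotation" check. A minimal sketch of the pattern, with a made-up trigger class and module path for illustration only:

    from __future__ import annotations  # PEP 563: annotations stay as strings, never evaluated at runtime

    import asyncio
    from typing import Any, AsyncIterator

    from airflow.triggers.base import BaseTrigger, TriggerEvent


    class WaitOneSecondTrigger(BaseTrigger):
        """Hypothetical toy trigger; only the annotation style is the point here."""

        def serialize(self) -> tuple[str, dict[str, Any]]:
            # (classpath, kwargs) pair, as in the triggers touched by this patch
            return ("example_module.WaitOneSecondTrigger", {})

        # Previously written as -> AsyncIterator["TriggerEvent"]; with the future
        # import above the quotes are redundant, which the newer ruff flags.
        async def run(self) -> AsyncIterator[TriggerEvent]:
            await asyncio.sleep(1)
            yield TriggerEvent({"status": "done"})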